Example #1
    def test_to_dict(self):
        c_spill = [point_line_release_spill(self.num_elements,
                   self.start_position, self.start_time) for i in
                   range(2)]

        u_spill = [point_line_release_spill(self.num_elements,
                   self.start_position2, self.start_time2) for i in
                   range(2)]

        scp = SpillContainerPair(True)

        for sp_tuple in zip(c_spill, u_spill):
            scp += sp_tuple

        dict_ = scp.to_dict()

        for key in dict_.keys():
            if key == 'certain_spills':
                enum_spill = c_spill
            elif key == 'uncertain_spills':
                enum_spill = u_spill

            for (i, spill) in enumerate(enum_spill):
                assert dict_[key]['items'][i][0] \
                    == '{0}.{1}'.format(spill.__module__,
                        spill.__class__.__name__)
                assert dict_[key]['items'][i][1] == i
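        # Illustrative note (not part of the original test): the assertions
        # above imply a serialized shape roughly like
        #     {'certain_spills':   {'items': [(<obj_type string>, 0),
        #                                     (<obj_type string>, 1)]},
        #      'uncertain_spills': {'items': [...]}}
        # where <obj_type string> is '<module>.<class name>' of each spill.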
Example #2
    def test_to_dict(self, json_):
        c_spill = [point_line_release_spill(self.num_elements,
                   self.start_position, self.start_time) for i in
                   range(2)]

        u_spill = [point_line_release_spill(self.num_elements,
                   self.start_position2, self.start_time2) for i in
                   range(2)]

        scp = SpillContainerPair(True)

        for sp_tuple in zip(c_spill, u_spill):
            scp += sp_tuple

        toserial = scp.to_dict()
        assert 'spills' in toserial
        assert 'uncertain_spills' in toserial

        for key in ('spills', 'uncertain_spills'):
            if key == 'spills':
                check = c_spill
            else:
                check = u_spill

            alltrue = [check[ix].id == spill['id'] \
                                for ix, spill in enumerate(toserial[key])]
            assert all(alltrue)
            alltrue = [check[ix].obj_type == spill['obj_type'] \
                                for ix, spill in enumerate(toserial[key])]
            assert all(alltrue)
Example #3
    def test_SpillContainerPair_uncertainty(self):
        'test uncertainty property'

        u_scp = SpillContainerPair(True)
        u_scp.uncertain = False
        assert not u_scp.uncertain
        assert not hasattr(u_scp, '_u_spill_container')

        u_scp.uncertain = True
        assert u_scp.uncertain
        assert hasattr(u_scp, '_u_spill_container')
Example #4
    def test_spill_by_index(self):
        'test spill_by_index returns the correct spill object'
        spill = [point_line_release_spill(self.num_elements,
                                          self.start_position, self.start_time),
                 point_line_release_spill(self.num_elements,
                                          self.start_position, self.start_time)]

        scp = SpillContainerPair(True)
        scp += spill[0]
        scp += spill[1]
        for ix in range(2):
            assert scp.spill_by_index(ix) is spill[ix]
            u_spill = scp.spill_by_index(ix, uncertain=True)
            assert u_spill is not spill[ix]
            assert scp.items()[1].spills[ix] is u_spill
Example #5
    def __restore__(self, time_step, start_time, duration, weathering_substeps, map, uncertain, cache_enabled):
        """
        Take out initialization that does not register the callback here.
        This is because new_from_dict will use this to restore the model _state
        when doing a midrun persistence.
        """

        # making sure basic stuff is in place before properties are set
        self.environment = OrderedCollection(dtype=Environment)
        self.movers = OrderedCollection(dtype=Mover)
        self.weatherers = OrderedCollection(dtype=Weatherer)

        # contains both certain/uncertain spills
        self.spills = SpillContainerPair(uncertain)

        self._cache = gnome.utilities.cache.ElementCache()
        self._cache.enabled = cache_enabled

        # list of output objects
        self.outputters = OrderedCollection(dtype=Outputter)

        # default to now, rounded to the nearest hour
        self._start_time = start_time
        self._duration = duration
        self.weathering_substeps = weathering_substeps
        self._map = map
        self.time_step = time_step  # this calls rewind() !
Example #6
    def test_release_particles(self):
        '''test that the 'id' for the uncertain spill container's data
        starts from 0'''
        spill = [point_line_release_spill(self.num_elements,
                 self.start_position, self.start_time) for i in
                 range(2)]

        scp = SpillContainerPair(True)
        scp += spill[0]
        scp += spill[1]
        for sc in scp.items():
            sc.prepare_for_model_run(windage_at)
            # model sets this for each step
            sc.current_time_stamp = self.start_time
            sc.release_elements(100, self.start_time)

        for key in ['id', 'spill_num', 'age']:
            c_val = scp.LE(key)
            u_val = scp.LE(key, 'uncertain')
            assert np.all(c_val == u_val)
Example #7
    def test_exception_tuple(self):
        """
        tests that spills can be added to SpillContainerPair object
        """

        spill = point_line_release_spill(self.num_elements,
                                         self.start_position, self.start_time)
        sp2 = point_line_release_spill(self.num_elements, self.start_position2,
                                       self.start_time2)
        scp = SpillContainerPair(True)

        with raises(ValueError):
            scp += (spill, sp2, spill)
Example #8
    def test_add_spill(self):
        spill = [
            point_line_release_spill(self.num_elements, self.start_position,
                                     self.start_time) for i in range(2)
        ]

        scp = SpillContainerPair(False)
        scp += (spill[0], )
        scp += spill[1]
        for sp_ix in zip(scp._spill_container.spills, range(len(spill))):
            spill_ = sp_ix[0]
            index = sp_ix[1]
            assert spill_.id == spill[index].id
Example #9
def test_exceptions(output_filename):
    spill_pair = SpillContainerPair()

    print "output_filename:", output_filename
    # begin tests
    netcdf = NetCDFOutput(output_filename, which_data='all')
    netcdf.rewind()  # delete temporary files

    with raises(TypeError):
        # need to pass in model start time
        netcdf.prepare_for_model_run(num_time_steps=4)

    with raises(TypeError):
        # need to pass in model start time and spills
        netcdf.prepare_for_model_run()

    with raises(ValueError):
        # need a cache object
        netcdf.write_output(0)

    with raises(ValueError):
        netcdf.which_data = 'some random string'

    # changed renderer and netcdf outputter to delete old files in
    # prepare_for_model_run() rather than rewind()
    # -- rewind() was getting called a lot
    # -- before there was time to change the output file names, etc.
    # So for this unit test, there should be no exception if we do it twice.
    netcdf.prepare_for_model_run(model_start_time=datetime.now(),
                                 spills=spill_pair,
                                 num_time_steps=4)
    netcdf.prepare_for_model_run(model_start_time=datetime.now(),
                                 spills=spill_pair,
                                 num_time_steps=4)

    with raises(AttributeError):
        'cannot change after prepare_for_model_run has been called'
        netcdf.prepare_for_model_run(model_start_time=datetime.now(),
                                     spills=spill_pair,
                                     num_time_steps=4)
        netcdf.which_data = 'most'
Example #10
    def test_add_spillpair(self):
        c_spill = [
            point_line_release_spill(self.num_elements, self.start_position,
                                     self.start_time) for i in range(2)
        ]

        u_spill = [
            point_line_release_spill(self.num_elements, self.start_position2,
                                     self.start_time2) for i in range(2)
        ]

        scp = SpillContainerPair(True)

        for sp_tuple in zip(c_spill, u_spill):
            scp += sp_tuple

        for sp, idx in zip(scp._spill_container.spills, range(len(c_spill))):
            assert sp.id == c_spill[idx].id

        for sp, idx in zip(scp._u_spill_container.spills, range(len(c_spill))):
            assert sp.id == u_spill[idx].id
Example #11
    def test_rewind_change_spill_attribute(self):
        '''
        check that if an attribute of the forecast spill container is updated,
        the uncertain spill container creates a new copy of uncertain spills
        '''
        num_elements = 100
        release_time = datetime(2012, 1, 1, 12)
        start_position = (23.0, -78.5, 0.0)
        scp = SpillContainerPair(uncertain=True)

        scp += point_line_release_spill(num_elements, start_position,
                                        release_time)
        (forecast_sc, uncertain_sc) = scp.items()
        assert forecast_sc.spills == uncertain_sc.spills

        forecast_sc.spills[0].release_time = release_time + timedelta(hours=1)
        (forecast_sc, uncertain_sc) = scp.items()
        assert forecast_sc.spills != uncertain_sc.spills

        scp.rewind()
        (forecast_sc, uncertain_sc) = scp.items()
        assert forecast_sc.spills == uncertain_sc.spills
Example #13
class Model(serializable.Serializable):

    """ 
    PyGNOME Model Class
    
    """

    _update = [
        'time_step',
        'start_time',
        'duration',
        'uncertain',
        'movers',
        'environment',
        'spills',
        'map',
        'outputters',
        'cache_enabled',
        ]
    _create = []
    _create.extend(_update)
    state = copy.deepcopy(serializable.Serializable.state)
    state.add(create=_create, update=_update)  # no need to copy parent's state in this case

    @classmethod
    def new_from_dict(cls, dict_):
        """
        Restore model from previously persisted state
        """

        l_env = dict_.pop('environment')
        l_out = dict_.pop('outputters')
        l_movers = dict_.pop('movers')

        c_spills = dict_.pop('certain_spills')
        if 'uncertain_spills' in dict_.keys():
            u_spills = dict_.pop('uncertain_spills')
            l_spills = zip(c_spills, u_spills)
        else:
            l_spills = c_spills

        model = object.__new__(cls)
        model.__restore__(**dict_)
        [model.environment.add(obj) for obj in l_env]
        [model.outputters.add(obj) for obj in l_out]
        [model.spills.add(obj) for obj in l_spills]
        [model.movers.add(obj) for obj in l_movers]

        # register callback with OrderedCollection

        model.movers.register_callback(model._callback_add_mover,
                                       ('add', 'replace'))

        return model

    def __init__(
        self,
        time_step=timedelta(minutes=15),
        start_time=round_time(datetime.now(), 3600),
        duration=timedelta(days=1),
        map=gnome.map.GnomeMap(),
        uncertain=False,
        cache_enabled=False,
        id=None,
        ):
        """ 
        Initializes a model. All arguments have a default.

        :param time_step=timedelta(minutes=15): model time step in seconds or as a timedelta object
        :param start_time=datetime.now(): start time of model, datetime object. default to now, rounded to the nearest hour
        :param duration=timedelta(days=1): how long to run the model, a timedelta object
        :param map=gnome.map.GnomeMap(): the land-water map, default is a map with no land-water
        :param uncertain=False: flag for setting uncertainty
        :param cache_enabled=False: flag for setting whether the model should cache results to disk.
        :param id: Unique Id identifying the newly created model (a UUID as a string).
                   This is used when loading an object from a persisted model
        """
        self.__restore__(
            time_step,
            start_time,
            duration,
            map,
            uncertain,
            cache_enabled,
            id,
            )

        # register callback with OrderedCollection

        self.movers.register_callback(self._callback_add_mover, ('add',
                'replace'))

    def __restore__(
        self,
        time_step,
        start_time,
        duration,
        map,
        uncertain,
        cache_enabled,
        id,
        ):
        """
        Take out initialization that does not register the callback here.
        This is because new_from_dict will use this to restore the model state
        when doing a midrun persistence.
        """

        # making sure basic stuff is in place before properties are set

        self.environment = OrderedCollection(dtype=Environment)
        self.movers = OrderedCollection(dtype=Mover)
        self.spills = SpillContainerPair(uncertain)  # contains both certain/uncertain spills
        self._cache = gnome.utilities.cache.ElementCache()
        self._cache.enabled = cache_enabled

        self.outputters = \
            OrderedCollection(dtype=gnome.outputter.Outputter)  # list of output objects
        self._start_time = start_time  # default to now, rounded to the nearest hour
        self._duration = duration
        self._map = map
        self.time_step = time_step  # this calls rewind() !

        self._gnome_id = gnome.GnomeId(id)

    def reset(self, **kwargs):
        """
        Resets model to defaults -- Caution -- clears all movers, spills, etc.

        Takes same keyword arguments as __init__
        """

        self.__init__(**kwargs)

    def rewind(self):
        """
        Rewinds the model to the beginning (start_time)
        """

        # fixme: do the movers need re-setting? -- or wait for prepare_for_model_run?

        self.current_time_step = -1  # start at -1
        self.model_time = self._start_time

        # note: this may be redundant -- they will get reset in setup_model_run() anyway..

        self.spills.rewind()
        gnome.utilities.rand.seed(1)  # set rand before each call so windages are set correctly

        # clear the cache:

        self._cache.rewind()
        for outputter in self.outputters:
            outputter.rewind()

#    def write_from_cache(self, filetype='netcdf', time_step='all'):
#        """
#        write the already-cached data to an output files.
#        """

    # ## Assorted properties

    @property
    def uncertain(self):
        return self.spills.uncertain

    @uncertain.setter
    def uncertain(self, uncertain_value):
        """
        only if uncertainty switch is toggled, then restart model
        """

        if self.spills.uncertain != uncertain_value:
            self.spills.uncertain = uncertain_value  # update uncertainty
            self.rewind()

    @property
    def cache_enabled(self):
        return self._cache.enabled

    @cache_enabled.setter
    def cache_enabled(self, enabled):
        self._cache.enabled = enabled

    @property
    def id(self):
        return self._gnome_id.id

    @property
    def start_time(self):
        return self._start_time

    @start_time.setter
    def start_time(self, start_time):
        self._start_time = start_time
        self.rewind()

    @property
    def time_step(self):
        return self._time_step

    @time_step.setter
    def time_step(self, time_step):
        """
        sets the time step, and rewinds the model

        :param time_step: the timestep as a timedelta object or integer seconds.
        """

        try:
            self._time_step = time_step.total_seconds()
        except AttributeError:
            # not a timedelta object -- assume it's in seconds.
            self._time_step = int(time_step)

        # there is a zeroth time step
        self._num_time_steps = int(self._duration.total_seconds()
                                   // self._time_step) + 1
        self.rewind()
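        # Worked example (illustrative only, using the defaults above): with
        # duration=timedelta(days=1) and time_step=timedelta(minutes=15),
        # _num_time_steps = 86400 // 900 + 1 = 97; the '+ 1' accounts for the
        # zeroth time step.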

    @property
    def current_time_step(self):
        return self._current_time_step

    @current_time_step.setter
    def current_time_step(self, step):
        self.model_time = self._start_time + timedelta(seconds=step
                * self.time_step)
        self._current_time_step = step

    @property
    def duration(self):
        return self._duration

    @duration.setter
    def duration(self, duration):
        if duration < self._duration:  # only need to rewind if shorter than it was...

            # fixme: actually, only need to rewind if the current model time is beyond the new time...

            self.rewind()
        self._duration = duration
        self._num_time_steps = int(self._duration.total_seconds()
                                   // self.time_step) + 1  # there is a zeroth time step

    @property
    def map(self):
        return self._map

    @map.setter
    def map(self, map_in):
        self._map = map_in
        self.rewind()

    @property
    def num_time_steps(self):
        return self._num_time_steps

    def setup_model_run(self):
        """
        Sets up each mover for the model run

        """

        self.spills.rewind()  # why is rewind for spills here?

        for outputter in self.outputters:
            outputter.prepare_for_model_run(model_start_time=self.start_time,
                                            cache=self._cache,
                                            uncertain=self.uncertain,
                                            spills=self.spills)

        array_types = {}
        for mover in self.movers:
            mover.prepare_for_model_run()
            array_types.update(mover.array_types)

        for sc in self.spills.items():
            sc.prepare_for_model_run(array_types)

    def setup_time_step(self):
        """
        sets up everything for the current time_step:

        right now only prepares the movers -- maybe more later?.
        """

        # initialize movers differently if model uncertainty is on

        for mover in self.movers:
            for sc in self.spills.items():
                mover.prepare_for_model_step(sc, self.time_step,
                        self.model_time)
        for outputter in self.outputters:
            outputter.prepare_for_model_step(self.time_step, self.model_time)

    def move_elements(self):
        """

        Moves elements:
         - loops through all the movers and moves the elements
         - sets new_position array for each spill
         - calls the beaching code to beach the elements that need beaching.
         - sets the new position
        """

        # if there are no spills, there is nothing to do:

        if len(self.spills) > 0:  # can this check be removed?
            for sc in self.spills.items():
                if sc.num_released > 0:  # can this check be removed?

                    # possibly refloat elements

                    self.map.refloat_elements(sc, self.time_step)

                    # reset next_positions

                    (sc['next_positions'])[:] = sc['positions']

                    # loop through the movers

                    for mover in self.movers:
                        delta = mover.get_move(sc, self.time_step,
                                self.model_time)
                        sc['next_positions'] += delta

                    self.map.beach_elements(sc)

                    # the final move to the new positions

                    (sc['positions'])[:] = sc['next_positions']

    def step_is_done(self):
        """
        Loop through movers and call model_step_is_done
        """

        for mover in self.movers:
            for sc in self.spills.items():
                mover.model_step_is_done(sc)
        for sc in self.spills.items():
            sc.model_step_is_done()

        for outputter in self.outputters:
            outputter.model_step_is_done()

    def write_output(self):
        output_info = {'step_num': self.current_time_step}
        for outputter in self.outputters:
            if self.current_time_step == self.num_time_steps - 1:
                output = outputter.write_output(self.current_time_step, True)
            else:
                output = outputter.write_output(self.current_time_step)
            if output is not None:
                output_info.update(output)
        return output_info

    def step(self):
        """
        Steps the model forward (or backward) in time. Needs testing for
        hindcasting.
        """

        for sc in self.spills.items():
            # set the current time stamp only after current_time_step is
            # incremented and before the output is written. Set it to None here
            # just so we're not carrying around the old time_stamp
            sc.current_time_stamp = None

        # it gets incremented after this check
        if self.current_time_step >= self._num_time_steps - 1:
            raise StopIteration

        if self.current_time_step == -1:
            # that's all we need to do for the zeroth time step
            self.setup_model_run()
        else:
            self.setup_time_step()
            self.move_elements()
            self.step_is_done()

        self.current_time_step += 1

        # this is where the new step begins!
        # the elements released are during the time period:
        #    self.model_time + self.time_step
        # The else part of the loop computes values for data_arrays that
        # correspond with time_stamp:
        #    self.model_time + self.time_step
        # This is the current_time_stamp attribute of the SpillContainer
        #     [sc.current_time_stamp for sc in self.spills.items()]
        for sc in self.spills.items():
            sc.current_time_stamp = self.model_time
            sc.release_elements(self.time_step, self.model_time)

        # cache the results - current_time_step is incremented but the
        # current_time_stamp in spill_containers (self.spills) is not updated
        # till we go through the prepare_for_model_step
        self._cache.save_timestep(self.current_time_step, self.spills)
        output_info = self.write_output()
        return output_info

    def __iter__(self):
        """
        for compatibility with Python's iterator protocol

        rewinds the model and returns itself so it can be iterated over.
        """

        self.rewind()
        return self

    def next(self):
        """
        (This method here to satisfy Python's iterator and generator protocols)

        Simply calls model.step()

        :return: the step number
        """

        return self.step()
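    # Illustrative usage of the iterator protocol above (assumes a fully
    # configured model instance; not part of the original source):
    #
    #     for step_output in model:   # __iter__() rewinds, next() calls step()
    #         print step_output['step_num']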

    def full_run(self, rewind=True, log=False):
        """
        Do a full run of the model.

        :param rewind=True: whether to rewind the model first -- defaults to True.
                            If set to False, the model will be run from the
                            current step to the end.
        :returns: list of outputter info dicts

        """

        if rewind:
            self.rewind()

        # run the model

        output_data = []
        while True:
            try:
                results = self.step()
                if log:
                    print results
                output_data.append(results)
            except StopIteration:
                print 'Done with the model run'
                break
        return output_data

    def movers_to_dict(self):
        """
        call to_dict method of OrderedCollection object
        """

        return self.movers.to_dict()

    def environment_to_dict(self):
        """
        call to_dict method of OrderedCollection object
        """

        return self.environment.to_dict()

    def spills_to_dict(self):
        return self.spills.to_dict()

    def outputters_to_dict(self):
        """
        call to_dict method of OrderedCollection object
        """

        return self.outputters.to_dict()

    def map_to_dict(self):
        """
        create a tuple that contains: (type, object.id)
        """

        # dict_ = {'map': ("{0}.{1}".format(self.map.__module__, self.map.__class__.__name__), self.map.id)}

        return ('{0}.{1}'.format(self.map.__module__,
                self.map.__class__.__name__), self.map.id)

        # if self.output_map is not None:
        #    dict_.update({'output_map': ("{0}.{1}".format(self.output_map.__module__, self.output_map.__class__.__name__), self.output_map.id)})

    def _callback_add_mover(self, obj_added):
        """ callback after mover has been added """

        if isinstance(obj_added, WindMover):
            if obj_added.wind.id not in self.environment:
                self.environment += obj_added.wind

        if isinstance(obj_added, CatsMover):
            if obj_added.tide is not None and obj_added.tide.id \
                not in self.environment:
                self.environment += obj_added.tide

        self.rewind()  # rewind model if a new mover is added

    def __eq__(self, other):
        check = super(Model, self).__eq__(other)
        if check:

            # also check the data in spill_container object

            if type(self.spills) != type(other.spills):
                return False
            if self.spills != other.spills:
                return False

        return check

    def __ne__(self, other):
        """ 
        Compare inequality (!=) of two objects
        """

        if self == other:
            return False
        else:
            return True
Example #14
    def test_init_SpillContainerPair(self):
        'All this does is test that it can be initialized'
        SpillContainerPair()
        SpillContainerPair(True)
        assert True
Example #15
    def init_scp(self):
        scp = SpillContainerPair(uncertain=True)
        for s in self.s0:
            scp += s
        return scp
Example #16
class Model(Serializable):
    "PyGNOME Model Class"
    _update = [
        "time_step",
        "weathering_substeps",
        "start_time",
        "duration",
        "uncertain",
        "movers",
        "weatherers",
        "environment",
        "spills",
        "map",
        "outputters",
        "cache_enabled",
    ]
    _create = []
    _create.extend(_update)
    _state = copy.deepcopy(Serializable._state)

    # no need to copy parent's _state in this case
    _state.add(create=_create, update=_update)

    @classmethod
    def new_from_dict(cls, dict_):
        "Restore model from previously persisted _state"
        l_env = dict_.pop("environment")
        l_out = dict_.pop("outputters")
        l_movers = dict_.pop("movers")
        l_weatherers = dict_.pop("weatherers")
        c_spills = dict_.pop("certain_spills")

        if "uncertain_spills" in dict_:
            u_spills = dict_.pop("uncertain_spills")
            l_spills = zip(c_spills, u_spills)
        else:
            l_spills = c_spills

        model = object.__new__(cls)
        model.__restore__(**dict_)
        [model.environment.add(obj) for obj in l_env]
        [model.outputters.add(obj) for obj in l_out]
        [model.spills.add(obj) for obj in l_spills]
        [model.movers.add(obj) for obj in l_movers]
        [model.weatherers.add(obj) for obj in l_weatherers]

        # register callback with OrderedCollection
        model.movers.register_callback(model._callback_add_mover, ("add", "replace"))

        model.weatherers.register_callback(model._callback_add_weatherer, ("add", "replace"))

        return model

    def __init__(
        self,
        time_step=timedelta(minutes=15),
        start_time=round_time(datetime.now(), 3600),
        duration=timedelta(days=1),
        weathering_substeps=1,
        map=gnome.map.GnomeMap(),
        uncertain=False,
        cache_enabled=False,
        id=None,
    ):
        """
        Initializes a model. All arguments have a default.

        :param time_step=timedelta(minutes=15): model time step in seconds
                                                or as a timedelta object
        :param start_time=datetime.now(): start time of model, datetime
                                          object. Rounded to the nearest hour.
        :param duration=timedelta(days=1): How long to run the model,
                                           a timedelta object.
        :param int weathering_substeps=1: How many weathering substeps to
                                          run inside a single model time step.
        :param map=gnome.map.GnomeMap(): The land-water map.
        :param uncertain=False: Flag for setting uncertainty.
        :param cache_enabled=False: Flag for setting whether the model should
                                    cache results to disk.
        :param id: Unique Id identifying the newly created model (a UUID as a
                   string).  This is used when loading an object from a
                   persisted model
        """
        self.__restore__(time_step, start_time, duration, weathering_substeps, map, uncertain, cache_enabled)

        self._gnome_id = gnome.GnomeId(id)

        # register callback with OrderedCollection
        self.movers.register_callback(self._callback_add_mover, ("add", "replace"))

        self.weatherers.register_callback(self._callback_add_weatherer, ("add", "replace"))

    def __restore__(self, time_step, start_time, duration, weathering_substeps, map, uncertain, cache_enabled):
        """
        Take out initialization that does not register the callback here.
        This is because new_from_dict will use this to restore the model _state
        when doing a midrun persistence.
        """

        # making sure basic stuff is in place before properties are set
        self.environment = OrderedCollection(dtype=Environment)
        self.movers = OrderedCollection(dtype=Mover)
        self.weatherers = OrderedCollection(dtype=Weatherer)

        # contains both certain/uncertain spills
        self.spills = SpillContainerPair(uncertain)

        self._cache = gnome.utilities.cache.ElementCache()
        self._cache.enabled = cache_enabled

        # list of output objects
        self.outputters = OrderedCollection(dtype=Outputter)

        # default to now, rounded to the nearest hour
        self._start_time = start_time
        self._duration = duration
        self.weathering_substeps = weathering_substeps
        self._map = map
        self.time_step = time_step  # this calls rewind() !

    def reset(self, **kwargs):
        """
        Resets model to defaults -- Caution -- clears all movers, spills, etc.
        Takes same keyword arguments as __init__
        """
        self.__init__(**kwargs)

    def rewind(self):
        """
        Rewinds the model to the beginning (start_time)
        """

        # fixme: do the movers need re-setting? -- or wait for
        #        prepare_for_model_run?

        self.current_time_step = -1
        self.model_time = self._start_time

        # note: This may be redundant.  They will get reset in
        #       setup_model_run() anyway..

        self.spills.rewind()

        # set rand before each call so windages are set correctly
        gnome.utilities.rand.seed(1)

        # clear the cache:
        self._cache.rewind()

        for outputter in self.outputters:
            outputter.rewind()

    #    def write_from_cache(self, filetype='netcdf', time_step='all'):
    #        """
    #        write the already-cached data to an output files.
    #        """

    @property
    def uncertain(self):
        return self.spills.uncertain

    @uncertain.setter
    def uncertain(self, uncertain_value):
        """
        only if uncertainty switch is toggled, then restart model
        """
        if self.spills.uncertain != uncertain_value:
            self.spills.uncertain = uncertain_value  # update uncertainty
            self.rewind()

    @property
    def cache_enabled(self):
        return self._cache.enabled

    @cache_enabled.setter
    def cache_enabled(self, enabled):
        self._cache.enabled = enabled

    @property
    def id(self):
        return self._gnome_id.id

    @property
    def start_time(self):
        return self._start_time

    @start_time.setter
    def start_time(self, start_time):
        self._start_time = start_time
        self.rewind()

    @property
    def time_step(self):
        return self._time_step

    @time_step.setter
    def time_step(self, time_step):
        """
        Sets the time step, and rewinds the model

        :param time_step: The timestep can be a timedelta object
                          or integer seconds.
        """
        try:
            self._time_step = time_step.total_seconds()
        except AttributeError:
            self._time_step = int(time_step)

        # there is a zeroth time step
        self._num_time_steps = int(self._duration.total_seconds() // self._time_step) + 1
        self.rewind()

    @property
    def current_time_step(self):
        return self._current_time_step

    @current_time_step.setter
    def current_time_step(self, step):
        self.model_time = self._start_time + timedelta(seconds=step * self.time_step)
        self._current_time_step = step

    @property
    def duration(self):
        return self._duration

    @duration.setter
    def duration(self, duration):
        if duration < self._duration:
            # only need to rewind if shorter than it was...
            # fixme: actually, only need to rewind if current model time
            # is beyond new time...
            self.rewind()
        self._duration = duration

        # there is a zeroth time step
        self._num_time_steps = int(self._duration.total_seconds() // self.time_step) + 1

    @property
    def map(self):
        return self._map

    @map.setter
    def map(self, map_in):
        self._map = map_in
        self.rewind()

    @property
    def num_time_steps(self):
        return self._num_time_steps

    def setup_model_run(self):
        """
        Sets up each mover for the model run
        """
        self.spills.rewind()  # why is rewind for spills here?

        array_types = {}

        for mover in self.movers:
            mover.prepare_for_model_run()
            array_types.update(mover.array_types)

        for w in self.weatherers:
            w.prepare_for_model_run()
            array_types.update(w.array_types)

        for sc in self.spills.items():
            sc.prepare_for_model_run(array_types)

        # outputters need array_types, so this needs to come after those
        # have been updated.
        for outputter in self.outputters:
            outputter.prepare_for_model_run(
                model_start_time=self.start_time, cache=self._cache, uncertain=self.uncertain, spills=self.spills
            )

    def setup_time_step(self):
        """
        sets up everything for the current time_step:
        """
        # initialize movers differently if model uncertainty is on
        for m in self.movers:
            for sc in self.spills.items():
                m.prepare_for_model_step(sc, self.time_step, self.model_time)

        for w in self.weatherers:
            for sc in self.spills.items():
                # maybe we will setup a super-sampling step here???
                w.prepare_for_model_step(sc, self.time_step, self.model_time)

        for outputter in self.outputters:
            outputter.prepare_for_model_step(self.time_step, self.model_time)

    def move_elements(self):
        """
        Moves elements:
         - loops through all the movers and moves the elements
         - sets new_position array for each spill
         - calls the beaching code to beach the elements that need beaching.
         - sets the new position
        """
        for sc in self.spills.items():
            if sc.num_released > 0:  # can this check be removed?

                # possibly refloat elements
                self.map.refloat_elements(sc, self.time_step)

                # reset next_positions
                (sc["next_positions"])[:] = sc["positions"]

                # loop through the movers
                for m in self.movers:
                    delta = m.get_move(sc, self.time_step, self.model_time)
                    sc["next_positions"] += delta

                self.map.beach_elements(sc)

                # the final move to the new positions
                (sc["positions"])[:] = sc["next_positions"]

    def weather_elements(self):
        """
        Weathers elements:
        - loops through all the weatherers, passing in the spill_container
          and the time range
        - a weatherer modifies the data arrays in the spill container, so a
          particular time range should not be run multiple times.  It is
          expected that we are processing a sequence of contiguous time ranges.
        - Note: If there are multiple sequential weathering processes, some
                inaccuracy could occur.  A proposed solution is to
                'super-sample' the model time step so that it will be replaced
                with many smaller time steps.  We'll have to see if this pans
                out in practice.
        """
        for sc in self.spills.items():
            for w in self.weatherers:
                for model_time, time_step in self._split_into_substeps():
                    w.weather_elements(sc, time_step, model_time)

    def _split_into_substeps(self):
        """
        :return: sequence of (datetime, timestep)
         (Note: we divide evenly on second boundaries.
                   Thus, there will likely be a remainder
                   that needs to be included.  We include
                   this remainder, which results in
                   1 more sub-step than we requested.)
        """
        time_step = int(self._time_step)
        sub_step = time_step / self.weathering_substeps

        indexes = [idx for idx in range(0, time_step + 1, sub_step)]
        res = [(idx, next_idx - idx) for idx, next_idx in zip(indexes, indexes[1:])]

        if sum(res[-1]) < time_step:
            # collect the remaining slice
            res.append((sum(res[-1]), time_step % sub_step))

        res = [(self.model_time + timedelta(seconds=idx), delta) for idx, delta in res]

        return res
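        # Worked example (illustrative only, not in the original source):
        # with Python 2 integer division, _time_step=3600 and
        # weathering_substeps=7 give sub_step=514 and
        # indexes=[0, 514, ..., 3598]; the first 7 substeps are 514 s each
        # and the 2 s remainder (3600 % 514) is appended as an 8th substep.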

    def step_is_done(self):
        """
        Loop through movers and call model_step_is_done
        """
        for mover in self.movers:
            for sc in self.spills.items():
                mover.model_step_is_done(sc)

        for w in self.weatherers:
            w.model_step_is_done()

        for sc in self.spills.items():
            "removes elements with oil_status.to_be_removed"
            sc.model_step_is_done()

            # age remaining particles
            sc["age"][:] = sc["age"][:] + self.time_step

        for outputter in self.outputters:
            outputter.model_step_is_done()

    def write_output(self):
        output_info = {"step_num": self.current_time_step}

        for outputter in self.outputters:
            if self.current_time_step == self.num_time_steps - 1:
                output = outputter.write_output(self.current_time_step, True)
            else:
                output = outputter.write_output(self.current_time_step)

            if output is not None:
                output_info.update(output)

        return output_info

    def step(self):
        """
        Steps the model forward (or backward) in time. Needs testing for
        hindcasting.
        """
        for sc in self.spills.items():
            # Set the current time stamp only after current_time_step is
            # incremented and before the output is written. Set it to None here
            # just so we're not carrying around the old time_stamp
            sc.current_time_stamp = None

        # it gets incremented after this check
        if self.current_time_step >= self._num_time_steps - 1:
            raise StopIteration

        if self.current_time_step == -1:
            # that's all we need to do for the zeroth time step
            self.setup_model_run()
        else:
            self.setup_time_step()
            self.move_elements()
            self.weather_elements()
            self.step_is_done()

        self.current_time_step += 1

        # this is where the new step begins!
        # the elements released are during the time period:
        #    self.model_time + self.time_step
        # The else part of the loop computes values for data_arrays that
        # correspond with time_stamp:
        #    self.model_time + self.time_step
        # This is the current_time_stamp attribute of the SpillContainer
        #     [sc.current_time_stamp for sc in self.spills.items()]
        for sc in self.spills.items():
            sc.current_time_stamp = self.model_time

            # release particles for next step - these particles will be aged
            # in the next step
            sc.release_elements(self.time_step, self.model_time)

        # cache the results - current_time_step is incremented but the
        # current_time_stamp in spill_containers (self.spills) is not updated
        # till we go through the prepare_for_model_step
        self._cache.save_timestep(self.current_time_step, self.spills)
        output_info = self.write_output()
        return output_info

    def __iter__(self):
        """
        Rewinds the model and returns itself so it can be iterated over.
        """
        self.rewind()

        return self

    def next(self):
        """
        (This method satisfies Python's iterator and generator protocols)

        :return: the step number
        """
        return self.step()

    def full_run(self, rewind=True, log=False):
        """
        Do a full run of the model.

        :param rewind=True: whether to rewind the model first
                            -- if set to False, the model will be run from the
                               current step to the end
        :returns: list of outputter info dicts
        """
        if rewind:
            self.rewind()

        # run the model
        output_data = []
        while True:
            try:
                results = self.step()
                if log:
                    print results
                output_data.append(results)
            except StopIteration:
                print "Done with the model run"
                break

        return output_data

    def movers_to_dict(self):
        """
        Call to_dict method of OrderedCollection object
        """
        return self.movers.to_dict()

    def weatherers_to_dict(self):
        """
        Call to_dict method of OrderedCollection object
        """
        return self.weatherers.to_dict()

    def environment_to_dict(self):
        """
        Call to_dict method of OrderedCollection object
        """
        return self.environment.to_dict()

    def spills_to_dict(self):
        return self.spills.to_dict()

    def outputters_to_dict(self):
        """
        Call to_dict method of OrderedCollection object
        """
        return self.outputters.to_dict()

    def map_to_dict(self):
        """
        returns the gnome object type as a string
        """
        return "{0}.{1}".format(self.map.__module__, self.map.__class__.__name__)

    def _callback_add_mover(self, obj_added):
        "Callback after mover has been added"
        if isinstance(obj_added, WindMover):
            if obj_added.wind.id not in self.environment:
                self.environment += obj_added.wind

        if isinstance(obj_added, CatsMover):
            if obj_added.tide is not None and obj_added.tide.id not in self.environment:
                self.environment += obj_added.tide

        self.rewind()  # rewind model if a new mover is added

    def _callback_add_weatherer(self, obj_added):
        "Callback after weatherer has been added"
        if isinstance(obj_added, Weatherer):
            # not sure what kind of dependencies we have just yet.
            pass

        self.rewind()  # rewind model if a new weatherer is added

    def __eq__(self, other):
        check = super(Model, self).__eq__(other)
        if check:
            # also check the data in spill_container object
            if type(self.spills) != type(other.spills):
                return False

            if self.spills != other.spills:
                return False

        return check

    def __ne__(self, other):
        "Compare inequality (!=) of two objects"
        if self == other:
            return False
        else:
            return True

    """
    Following methods are for saving a Model instance or creating a new
    model instance from a saved location
    """

    def save(self, saveloc):
        """
        save model in json format to user specified saveloc

        :param saveloc: A valid directory. Model files are either persisted
                        here or a new model is re-created from the files
                        stored here. The files are clobbered when save() is
                        called.
        :type saveloc: A path as a string or unicode
        """
        path_, savedir = os.path.split(saveloc)
        if path_ == "":
            path_ = "."

        if not os.path.exists(path_):
            raise ValueError('"{0}" does not exist. \nCannot create "{1}"'.format(path_, savedir))

        if not os.path.exists(saveloc):
            os.mkdir(saveloc)

        self._empty_save_dir(saveloc)
        json_ = self.serialize("create")
        self._save_json_to_file(saveloc, json_, "{0}.json".format(self.__class__.__name__))

        json_ = self.map.serialize("create")
        self._save_json_to_file(saveloc, json_, "{0}.json".format(self.map.__class__.__name__))

        self._save_collection(saveloc, self.movers)
        self._save_collection(saveloc, self.weatherers)
        self._save_collection(saveloc, self.environment)
        self._save_collection(saveloc, self.outputters)

        for sc in self.spills.items():
            self._save_collection(saveloc, sc.spills)

        # persist model _state since middle of run
        if self.current_time_step > -1:
            self._save_spill_data(os.path.join(saveloc, "spills_data_arrays.nc"))

    def _save_collection(self, saveloc, coll_):
        """
        Function loops over an orderedcollection or any other iterable
        containing a list of objects. It calls the to_dict method for each
        object, then converts it to valid JSON (dict_to_json),
        and finally saves it to file (_save_json_to_file)

        :param OrderedCollection coll_: ordered collection to be saved

        Note: The movers and weatherer objects reference the environment
        collection. If a field is saved as reference (field.save_reference is
        True), then this function adds json_[field.name] = index where
        index is the index into the environment array for the reference
        object. Currently, only objects in the environment collection are
        referenced by movers.
        """
        for count, obj in enumerate(coll_):
            json_ = obj.serialize("create")
            for field in obj._state:
                if field.save_reference:
                    "attribute is stored as a reference to environment list"
                    if getattr(obj, field.name) is not None:
                        obj_id = getattr(obj, field.name).id
                        index = self.environment.index(obj_id)
                        json_[field.name] = index

            self._save_json_to_file(saveloc, json_, "{0}_{1}.json".format(obj.__class__.__name__, count))

    def _save_json_to_file(self, saveloc, data, name):
        """
        write json data to file

        :param dict data: JSON data to be saved
        :param name: file name under which the JSON data is saved
        """

        fname = os.path.join(saveloc, name)
        data = self._move_data_file(saveloc, data)  # copy any referenced data files to saveloc

        with open(fname, "w") as outfile:
            json.dump(data, outfile, indent=True)

    def _move_data_file(self, saveloc, json_):
        """
        Look at _state attribute of object. Find all fields with 'isdatafile'
        attribute as True. If there is a key in to_json corresponding with
        'name' of the fields with True 'isdatafile' attribute then move that
        datafile and update the key in the to_json to point to new location

        todo: maybe this belongs in serializable base class? Revisit this
        """
        _state = eval("{0}._state".format(json_["obj_type"]))
        fields = _state.get_field_by_attribute("isdatafile")

        for field in fields:
            if field.name not in json_:
                continue

            value = json_[field.name]

            if os.path.exists(value) and os.path.isfile(value):
                shutil.copy(value, saveloc)
                json_[field.name] = os.path.split(json_[field.name])[1]

        return json_

    def _save_spill_data(self, datafile):
        """ save the data arrays for current timestep to NetCDF """
        nc_out = NetCDFOutput(datafile, which_data="all", cache=self._cache)
        nc_out.prepare_for_model_run(model_start_time=self.start_time, uncertain=self.uncertain, spills=self.spills)
        nc_out.write_output(self.current_time_step)

    def _empty_save_dir(self, saveloc):
        """
        Remove all files, directories under saveloc

        First clean out directory, then add new save files
        This should only be called by self.save()
        """
        (dirpath, dirnames, filenames) = os.walk(saveloc).next()

        if dirnames:
            for dir_ in dirnames:
                shutil.rmtree(os.path.join(dirpath, dir_))

        if filenames:
            for file_ in filenames:
                os.remove(os.path.join(dirpath, file_))
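
    # Illustrative sketch (inferred from save() above, not stated in the
    # original source): after model.save('./save_dir'), the directory would
    # hold roughly Model.json, <MapClass>.json, one <Class>_<n>.json per
    # mover/weatherer/environment object/outputter/spill, and, when saving
    # mid-run, spills_data_arrays.nc with the current data arrays.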
Example #17
def test_eq_spill_container_pair(uncertain):
    """
    SpillContainerPair inherits from SpillContainer so it should
    compute __eq__ and __ne__ in the same way - test it here

    Incomplete - this doesn't currently work!
    Test fails if uncertainty is on, whether or not particles are released.
    This is because the 'id's of uncertain spills do not match, and one should
    not expect them to match.

    todo: remove 'id' property as a check for equality. This requires changes
          in persisting logic. Update persistence then revisit this test
          and simplify it
    """
    (sp1, sp2) = get_eq_spills()

    # windages array will not match after elements are released so let's not
    # add any more types to data_arrays for this test. Just look at base
    # array_types for SpillContainer's and ensure the data matches for them
    #sp1.element_type = ElementType()
    #sp2.element_type = ElementType()

    scp1 = SpillContainerPair(uncertain)  # uncertainty is on
    scp1.add(sp1)

    scp2 = SpillContainerPair(uncertain)
    if uncertain:
        u_sp1 = [
            scp1.items()[1].spills[spill.id]
            for spill in scp1.items()[1].spills
        ][0]

        u_sp2 = copy.deepcopy(u_sp1)
        # deepcopy does not match ids!
        # for test, we need these to match so force them to be equal here
        u_sp2._id = u_sp1.id

        scp2.add((sp2, u_sp2))
    else:
        scp2.add(sp2)

    for sc in zip(scp1.items(), scp2.items()):
        sc[0].prepare_for_model_run()
        sc[0].release_elements(360, sp1.release.release_time)
        sc[1].prepare_for_model_run()
        sc[1].release_elements(360, sp2.release.release_time)

    assert scp1 == scp2
    assert scp2 == scp1
    assert not (scp1 != scp2)
    assert not (scp2 != scp1)
Example #18
def test_eq_spill_container_pair(uncertain):
    """
    SpillContainerPair inherits from SpillContainer so it should
    compute __eq__ and __ne__ in the same way - test it here

    Incomplete - this doesn't currently work!
    Test fails if uncertainty is on, whether or not particles are released.
    This is because the 'id's of uncertain spills do not match, and one should
    not expect them to match.

    todo: remove 'id' property as a check for equality. This requires changes
          in persisting logic. Update persistence then revisit this test
          and simplify it
    """
    (sp1, sp2) = get_eq_spills()

    # windages array will not match after elements are released so let's not
    # add any more types to data_arrays for this test. Just look at base
    # array_types for SpillContainer's and ensure the data matches for them
    #sp1.element_type = ElementType()
    #sp2.element_type = ElementType()

    scp1 = SpillContainerPair(uncertain)  # uncertainty is on
    scp1.add(sp1)

    scp2 = SpillContainerPair(uncertain)
    if uncertain:
        u_sp1 = [scp1.items()[1].spills[spill.id] for spill in
                 scp1.items()[1].spills][0]

        u_sp2 = copy.deepcopy(u_sp1)
        # deepcopy does not match ids!
        # for test, we need these to match so force them to be equal here
        u_sp2._id = u_sp1.id

        scp2.add((sp2, u_sp2))
    else:
        scp2.add(sp2)

    for sc in zip(scp1.items(), scp2.items()):
        sc[0].prepare_for_model_run()
        sc[0].release_elements(360, sp1.release.release_time)
        sc[1].prepare_for_model_run()
        sc[1].release_elements(360, sp2.release.release_time)

    assert scp1 == scp2
    assert scp2 == scp1
    assert not (scp1 != scp2)
    assert not (scp2 != scp1)
Example #19
class Model(Serializable):
    '''
    PyGnome Model Class
    '''
    _update = ['time_step',
               'weathering_substeps',
               'start_time',
               'duration',
               'uncertain',
               'cache_enabled',
               'map',
               'movers',
               'weatherers',
               'environment',
               'outputters'
               ]
    _create = []
    _create.extend(_update)
    _state = copy.deepcopy(Serializable._state)
    _schema = ModelSchema

    # no need to copy parent's _state in this case
    _state.add(save=_create, update=_update)

    # override __eq__ since 'spills' and 'uncertain_spills' need to be checked
    # They both have _to_dict() methods to return underlying ordered
    # collections and that would not be the correct way to check equality
    _state += [Field('spills', save=True, update=True, test_for_eq=False),
               Field('uncertain_spills', save=True, test_for_eq=False)]

    # list of OrderedCollections
    _oc_list = ['movers', 'weatherers', 'environment', 'outputters']

    @classmethod
    def new_from_dict(cls, dict_):
        'Restore model from previously persisted _state'
        json_ = dict_.pop('json_')
        l_env = dict_.pop('environment', [])
        l_out = dict_.pop('outputters', [])
        l_movers = dict_.pop('movers', [])
        l_weatherers = dict_.pop('weatherers', [])
        c_spills = dict_.pop('spills', [])

        if 'uncertain_spills' in dict_:
            u_spills = dict_.pop('uncertain_spills')
            l_spills = zip(c_spills, u_spills)
        else:
            l_spills = c_spills

        # define defaults for properties that a location file may not contain
        kwargs = inspect.getargspec(cls.__init__)
        default_restore = dict(zip(kwargs[0][1:], kwargs[3]))

        if json_ == 'webapi':
            # default is to enable cache
            default_restore['cache_enabled'] = True

        for key in default_restore:
            default_restore[key] = dict_.pop(key, default_restore[key])

        model = object.__new__(cls)
        model.__restore__(**default_restore)

        # if there are other values in dict_, setattr
        if json_ == 'webapi':
            model.update_from_dict(dict_)
        else:
            cls._restore_attr_from_save(model, dict_)

        [model.environment.add(obj) for obj in l_env]
        [model.outputters.add(obj) for obj in l_out]
        [model.spills.add(obj) for obj in l_spills]
        [model.movers.add(obj) for obj in l_movers]
        [model.weatherers.add(obj) for obj in l_weatherers]

        # register callback with OrderedCollection
        model.movers.register_callback(model._callback_add_mover,
                                       ('add', 'replace'))

        model.weatherers.register_callback(model._callback_add_weatherer,
                                           ('add', 'replace'))

        # restore the spill data outside this method - let's not try to find
        # the saveloc here
        return model

    def __init__(self,
                 time_step=timedelta(minutes=15),
                 start_time=round_time(datetime.now(), 3600),
                 duration=timedelta(days=1),
                 weathering_substeps=1,
                 map=None,
                 uncertain=False,
                 cache_enabled=False,
                 name=None):
        '''

        Initializes a model.
        All arguments have a default.

        :param time_step=timedelta(minutes=15): model time step in seconds
            or as a timedelta object
        :param start_time=datetime.now(): start time of model, datetime
            object. Rounded to the nearest hour.
        :param duration=timedelta(days=1): How long to run the model,
            a timedelta object.
        :param int weathering_substeps=1: How many weathering substeps to
            run inside a single model time step.
        :param map=gnome.map.GnomeMap(): The land-water map.
        :param uncertain=False: Flag for setting uncertainty.
        :param cache_enabled=False: Flag for setting whether the model should
            cache results to disk.
        '''

        self.__restore__(time_step, start_time, duration,
                         weathering_substeps,
                         uncertain, cache_enabled, map, name)

        # register callback with OrderedCollection
        self.movers.register_callback(self._callback_add_mover,
                                      ('add', 'replace'))

        self.weatherers.register_callback(self._callback_add_weatherer,
                                          ('add', 'replace'))

    def __restore__(self, time_step, start_time, duration,
                    weathering_substeps, uncertain, cache_enabled, map, name):
        '''
        Take out initialization that does not register the callback here.
        This is because new_from_dict will use this to restore the model _state
        when doing a midrun persistence.
        '''
        # making sure basic stuff is in place before properties are set
        self.environment = OrderedCollection(dtype=Environment)
        self.movers = OrderedCollection(dtype=Mover)
        self.weatherers = OrderedCollection(dtype=Weatherer)

        # contains both certain/uncertain spills
        self.spills = SpillContainerPair(uncertain)

        self._cache = gnome.utilities.cache.ElementCache()
        self._cache.enabled = cache_enabled

        # list of output objects
        self.outputters = OrderedCollection(dtype=Outputter)

        # default to now, rounded to the nearest hour
        self._start_time = start_time
        self._duration = duration
        self.weathering_substeps = weathering_substeps
        if not map:
            map = gnome.map.GnomeMap()

        if name:
            self.name = name

        self._map = map
        self.time_step = time_step  # this calls rewind() !

    def reset(self, **kwargs):
        '''
        Resets model to defaults -- Caution -- clears all movers, spills, etc.
        Takes same keyword arguments as :meth:`__init__()`
        '''
        self.__init__(**kwargs)

    def rewind(self):
        '''
        Rewinds the model to the beginning (start_time)
        '''

        # fixme: do the movers need re-setting? -- or wait for
        #        prepare_for_model_run?

        self.current_time_step = -1
        self.model_time = self._start_time

        # note: This may be redundant.  They will get reset in
        #       setup_model_run() anyway..

        self.spills.rewind()

        # set rand before each call so windages are set correctly
        gnome.utilities.rand.seed(1)

        # clear the cache:
        self._cache.rewind()

        for outputter in self.outputters:
            outputter.rewind()

#    def write_from_cache(self, filetype='netcdf', time_step='all'):
#        """
#        write the already-cached data to an output files.
#        """

    @property
    def uncertain(self):
        '''
        Uncertainty attribute of the model. If flag is toggled, rewind model
        '''
        return self.spills.uncertain

    @uncertain.setter
    def uncertain(self, uncertain_value):
        '''
        Uncertainty attribute of the model
        '''
        if self.spills.uncertain != uncertain_value:
            self.spills.uncertain = uncertain_value  # update uncertainty
            self.rewind()

    @property
    def cache_enabled(self):
        '''
        If True, then generated data is cached
        '''
        return self._cache.enabled

    @cache_enabled.setter
    def cache_enabled(self, enabled):
        self._cache.enabled = enabled

    @property
    def start_time(self):
        '''
        Start time of the simulation
        '''
        return self._start_time

    @start_time.setter
    def start_time(self, start_time):
        self._start_time = start_time
        self.rewind()

    @property
    def time_step(self):
        '''
        time step over which the dynamics are computed
        '''
        return self._time_step

    @time_step.setter
    def time_step(self, time_step):
        '''
        Sets the time step, and rewinds the model

        :param time_step: The timestep can be a timedelta object
                          or integer seconds.
        '''
        try:
            self._time_step = time_step.total_seconds()
        except AttributeError:
            self._time_step = int(time_step)
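        # e.g. time_step=timedelta(minutes=15) and time_step=900 both result
        # in a 900 second _time_step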

        # there is a zeroth time step
        self._num_time_steps = int(self._duration.total_seconds()
                                   // self._time_step) + 1
        self.rewind()

    @property
    def current_time_step(self):
        '''
        Current timestep of the simulation
        '''
        return self._current_time_step

    @current_time_step.setter
    def current_time_step(self, step):
        self.model_time = self._start_time + timedelta(seconds=step *
                                                       self.time_step)
        self._current_time_step = step
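        # e.g. with a 900 s time_step, setting current_time_step = 4 puts
        # model_time one hour past _start_time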

    @property
    def duration(self):
        '''
        total duration of the model run
        '''
        return self._duration

    @duration.setter
    def duration(self, duration):
        if duration < self._duration:
            # only need to rewind if shorter than it was...
            # fixme: actually, only need to rewind if current model time
            # is beyond new time...
            self.rewind()
        self._duration = duration

        # there is a zeroth time step
        self._num_time_steps = int(self._duration.total_seconds()
                                   // self.time_step) + 1

    @property
    def map(self):
        '''
        land water map used for simulation
        '''
        return self._map

    @map.setter
    def map(self, map_in):
        self._map = map_in
        self.rewind()

    @property
    def num_time_steps(self):
        '''
        Read-only attribute: the computed number of time steps, based on
        :py:attr:`duration` and :py:attr:`time_step`
        '''
        return self._num_time_steps

    def setup_model_run(self):
        '''
        Sets up each mover for the model run
        '''
        self.spills.rewind()  # why is rewind for spills here?

        # remake orderedcollections defined by model
        for oc in [self.movers, self.weatherers,
                   self.outputters, self.environment]:
            oc.remake()

        array_types = {}

        for mover in self.movers:
            mover.prepare_for_model_run()
            array_types.update(mover.array_types)

        for w in self.weatherers:
            w.prepare_for_model_run()
            array_types.update(w.array_types)

        for sc in self.spills.items():
            sc.prepare_for_model_run(array_types)

        # outputters need array_types, so this needs to come after those
        # have been updated.
        for outputter in self.outputters:
            outputter.prepare_for_model_run(model_start_time=self.start_time,
                                            cache=self._cache,
                                            uncertain=self.uncertain,
                                            spills=self.spills)

    def setup_time_step(self):
        '''
        sets up everything for the current time_step:
        '''
        # initialize movers differently if model uncertainty is on
        for m in self.movers:
            for sc in self.spills.items():
                m.prepare_for_model_step(sc, self.time_step, self.model_time)

        for w in self.weatherers:
            for sc in self.spills.items():
                # maybe we will setup a super-sampling step here???
                w.prepare_for_model_step(sc, self.time_step, self.model_time)

        for outputter in self.outputters:
            outputter.prepare_for_model_step(self.time_step, self.model_time)

    def move_elements(self):
        '''
        Moves elements:
         - loops through all the movers and moves the elements
         - sets the next_positions array for each spill container
         - calls the beaching code to beach the elements that need beaching
         - copies next_positions back into positions
        '''
        for sc in self.spills.items():
            if sc.num_released > 0:  # can this check be removed?

                # possibly refloat elements
                self.map.refloat_elements(sc, self.time_step)

                # reset next_positions
                (sc['next_positions'])[:] = sc['positions']

                # loop through the movers
                for m in self.movers:
                    delta = m.get_move(sc, self.time_step, self.model_time)
                    sc['next_positions'] += delta

                self.map.beach_elements(sc)

                # the final move to the new positions
                (sc['positions'])[:] = sc['next_positions']

    def weather_elements(self):
        '''
        Weathers elements:

        - loops through all the weatherers, passing in the spill_container
          and the time range
        - a weatherer modifies the data arrays in the spill container, so a
          particular time range should not be run multiple times.  It is
          expected that we are processing a sequence of contiguous time ranges.
        - Note: If there are multiple sequential weathering processes, some
          inaccuracy could occur.  A proposed solution is to
          'super-sample' the model time step so that it will be replaced
          with many smaller time steps.  We'll have to see if this pans
          out in practice.

        '''
        for sc in self.spills.items():
            for w in self.weatherers:
                for model_time, time_step in self._split_into_substeps():
                    w.weather_elements(sc, time_step, model_time)

    def _split_into_substeps(self):
        '''
        :return: sequence of (datetime, timestep) tuples

        Note: we divide evenly on second boundaries, so there will likely be
        a remainder that needs to be included.  We include this remainder,
        which results in one more sub-step than requested.
        '''
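        # Worked example (illustrative): with _time_step = 900 and
        # weathering_substeps = 7, sub_step is 900 / 7 = 128 under Python 2
        # integer division, indexes = [0, 128, ..., 896], and the result is
        # seven (offset, 128) slices plus a final remainder slice of
        # (896, 4) seconds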
        time_step = int(self._time_step)
        sub_step = time_step / self.weathering_substeps

        indexes = [idx for idx in range(0, time_step + 1, sub_step)]
        res = [(idx, next_idx - idx)
               for idx, next_idx in zip(indexes, indexes[1:])]

        if sum(res[-1]) < time_step:
            # collect the remaining slice
            res.append((sum(res[-1]), time_step % sub_step))

        res = [(self.model_time + timedelta(seconds=idx), delta)
               for idx, delta in res]

        return res

    def step_is_done(self):
        '''
        Loop through movers and call model_step_is_done
        '''
        for mover in self.movers:
            for sc in self.spills.items():
                mover.model_step_is_done(sc)

        for w in self.weatherers:
            w.model_step_is_done()

        for sc in self.spills.items():
            # removes elements with oil_status.to_be_removed
            sc.model_step_is_done()

            # age remaining particles
            sc['age'][:] = sc['age'][:] + self.time_step

        for outputter in self.outputters:
            outputter.model_step_is_done()

    def write_output(self):
        output_info = {}

        for outputter in self.outputters:
            if self.current_time_step == self.num_time_steps - 1:
                output = outputter.write_output(self.current_time_step, True)
            else:
                output = outputter.write_output(self.current_time_step)

            if output is not None:
                output_info.update(output)

        if not output_info:
            return {'step_num': self.current_time_step}

        return output_info

    def step(self):
        '''
        Steps the model forward (or backward) in time. Needs testing for
        hindcasting.
        '''
        for sc in self.spills.items():
            # Set the current time stamp only after current_time_step is
            # incremented and before the output is written. Set it to None here
            # just so we're not carrying around the old time_stamp
            sc.current_time_stamp = None

        # it gets incremented after this check
        if self.current_time_step >= self._num_time_steps - 1:
            raise StopIteration

        if self.current_time_step == -1:
            # that's all we need to do for the zeroth time step
            self.setup_model_run()
        else:
            self.setup_time_step()
            self.move_elements()
            self.weather_elements()
            self.step_is_done()

        self.current_time_step += 1

        # this is where the new step begins!
        # the elements released are during the time period:
        #    self.model_time + self.time_step
        # The else branch above computes values for data_arrays that
        # correspond with time_stamp:
        #    self.model_time + self.time_step
        # This is the current_time_stamp attribute of the SpillContainer
        #     [sc.current_time_stamp for sc in self.spills.items()]
        for sc in self.spills.items():
            sc.current_time_stamp = self.model_time

            # release particles for next step - these particles will be aged
            # in the next step
            sc.release_elements(self.time_step, self.model_time)

        # cache the results - current_time_step is incremented but the
        # current_time_stamp in spill_containers (self.spills) is not updated
        # till we go through the prepare_for_model_step
        self._cache.save_timestep(self.current_time_step, self.spills)
        output_info = self.write_output()
        return output_info

    def __iter__(self):
        '''
        Rewinds the model and returns itself so it can be iterated over.
        '''
        self.rewind()

        return self

    def next(self):
        '''
        (This method satisfies Python's iterator and generator protocols)

        :return: the step number
        '''
        return self.step()
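    # Note: __iter__() together with next() implements the Python 2 iterator
    # protocol; under Python 3 this method would need to be named __next__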

    def full_run(self, rewind=True, log=False):
        '''
        Do a full run of the model.

        :param rewind=True: whether to rewind the model first -- if set to
            false, model will be run from the current step to the end
        :returns: list of outputter info dicts
        '''
        if rewind:
            self.rewind()

        # run the model
        output_data = []
        while True:
            try:
                results = self.step()

                if log:
                    print results

                output_data.append(results)
            except StopIteration:
                print 'Done with the model run'
                break

        return output_data

    def _callback_add_mover(self, obj_added):
        'Callback after mover has been added'
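        # e.g. adding a WindMover (which exposes a 'wind' attribute) also
        # adds its Wind object to self.environment if it is not already there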
        if hasattr(obj_added, 'wind'):
            if obj_added.wind.id not in self.environment:
                self.environment += obj_added.wind

        if hasattr(obj_added, 'tide') and obj_added.tide is not None:
            if obj_added.tide.id not in self.environment:
                self.environment += obj_added.tide

        self.rewind()  # rewind model if a new mover is added

    def _callback_add_weatherer(self, obj_added):
        'Callback after weatherer has been added'
        if isinstance(obj_added, Weatherer):
            # not sure what kind of dependencies we have just yet.
            pass

        self.rewind()  # rewind model if a new weatherer is added

    def __eq__(self, other):
        check = super(Model, self).__eq__(other)
        print 'Model.__eq__(): super check =', check
        if check:
            # also check the data in ordered collections
            if type(self.spills) != type(other.spills):
                print 'Model.__eq__(): spill types:', (type(self.spills),
                                                       type(other.spills))
                return False

            if self.spills != other.spills:
                print 'Model.__eq__(): spills:'
                pp.pprint((self.spills, other.spills))
                return False

        return check

    def __ne__(self, other):
        return not self == other

    '''
    Following methods are for saving a Model instance or creating a new
    model instance from a saved location
    '''
    def spills_to_dict(self):
        '''
        return the spills ordered collection for serialization
        '''
        return self.spills.to_dict()['spills']

    def uncertain_spills_to_dict(self):
        '''
        return the uncertain_spills ordered collection for serialization/save
        files
        '''
        if self.uncertain:
            dict_ = self.spills.to_dict()
            return dict_['uncertain_spills']

        return None

    def save(self, saveloc, references=None, name=None):
        # Note: defining references=References() in the function signature
        # would keep the same References object in memory between tests - it
        # changes the scope of References() to be outside the Model()
        # instance. We don't want this
        references = references if references is not None else References()
        self._make_saveloc(saveloc)
        self._empty_save_dir(saveloc)
        json_ = self.serialize('save')

        # map is the only nested structure - let's manually call
        # _move_data_file on it
        self.map._move_data_file(saveloc, json_['map'])

        for oc in self._oc_list:
            coll_ = getattr(self, oc)
            self._save_collection(saveloc, coll_, references, json_[oc])

        for sc in self.spills.items():
            if sc.uncertain:
                key = 'uncertain_spills'
            else:
                key = 'spills'

            self._save_collection(saveloc, sc.spills, references, json_[key])

        if self.current_time_step > -1:
            '''
            hard code the filename - can make this an attribute if user wants
            to change it - but not sure if that will ever be needed?
            '''
            self._save_spill_data(os.path.join(saveloc,
                                        'spills_data_arrays.nc'))

        # there should be no more references
        self._json_to_saveloc(json_, saveloc, references, name)
        if name and references.reference(self) != name:
            # todo: this should be a warning rather than an exception
            raise Exception("{0} already exists, cannot use it to name "
                            "the model's json file".format(name))
        return references

    def _save_collection(self, saveloc, coll_, refs, coll_json):
        """
        Reference objects inside OrderedCollections. Since the OC itself
        isn't a reference, but the objects in the list are references, do
        things a little differently here

        :param OrderedCollection coll_: ordered collection to be saved
        """
        for count, obj in enumerate(coll_):
            json_ = obj.serialize('save')
            for field in obj._state:
                if field.save_reference:
                    # attribute is stored as a reference to environment list
                    if getattr(obj, field.name) is not None:
                        ref_obj = getattr(obj, field.name)
                        index = self.environment.index(ref_obj)
                        json_[field.name] = index
            obj_ref = refs.get_reference(obj)
            if obj_ref is None:
                # try the following name - if 'fname' already exists in
                # references, obj.save() will assign a different file name
                fname = '{0.__class__.__name__}_{1}.json'.format(obj, count)
                obj.save(saveloc, refs, fname)
                coll_json[count]['id'] = refs.reference(obj)
            else:
                coll_json[count]['id'] = obj_ref

    def _save_spill_data(self, datafile):
        """ save the data arrays for current timestep to NetCDF """
        nc_out = NetCDFOutput(datafile, which_data='all', cache=self._cache)
        nc_out.prepare_for_model_run(model_start_time=self.start_time,
                                     uncertain=self.uncertain,
                                     spills=self.spills)
        nc_out.write_output(self.current_time_step)

    def _load_spill_data(self, spill_data):
        """
        load NetCDF file and add spill data back in - designed for savefiles
        """

        if not os.path.exists(spill_data):
            return

        if self.uncertain:
            saveloc, spill_data_fname = os.path.split(spill_data)
            spill_data_fname, ext = os.path.splitext(spill_data_fname)
            u_spill_data = os.path.join(saveloc,
                '{0}_uncertain{1}'.format(spill_data_fname, ext))

        array_types = {}

        for m in self.movers:
            array_types.update(m.array_types)

        for w in self.weatherers:
            array_types.update(w.array_types)

        for sc in self.spills.items():
            if sc.uncertain:
                data = NetCDFOutput.read_data(u_spill_data, time=None,
                                              which_data='all')
            else:
                data = NetCDFOutput.read_data(spill_data, time=None,
                                              which_data='all')

            sc.current_time_stamp = data.pop('current_time_stamp').item()
            sc._data_arrays = data
            sc._array_types.update(array_types)

    def _empty_save_dir(self, saveloc):
        '''
        Remove all files, directories under saveloc

        First clean out directory, then add new save files
        This should only be called by self.save()
        '''
        (dirpath, dirnames, filenames) = os.walk(saveloc).next()
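        # note: calling .next() on the os.walk() generator is Python 2
        # syntax; Python 3 would use next(os.walk(saveloc))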

        if dirnames:
            for dir_ in dirnames:
                shutil.rmtree(os.path.join(dirpath, dir_))

        if filenames:
            for file_ in filenames:
                os.remove(os.path.join(dirpath, file_))

    def serialize(self, json_='webapi'):
        '''
        Serialize Model object
        treat special-case attributes of Model.
        '''
        toserial = self.to_serialize(json_)
        schema = self.__class__._schema()
        o_json_ = schema.serialize(toserial)
        o_json_['map'] = self.map.serialize(json_)

        if json_ == 'webapi':
            for attr in ('environment', 'outputters', 'weatherers', 'movers',
                         'spills'):
                o_json_[attr] = self.serialize_oc(attr, json_)

        return o_json_

    def serialize_oc(self, attr, json_='webapi'):
        '''
        Serialize Model attributes of type ordered collection
        '''
        json_out = []
        attr = getattr(self, attr)
        if isinstance(attr, (OrderedCollection, SpillContainerPair)):
            for item in attr:
                json_out.append(item.serialize(json_))
        return json_out

    @classmethod
    def deserialize(cls, json_):
        '''
        treat special-case attributes of Model.
        '''
        deserial = cls._schema().deserialize(json_)

        if 'map' in json_ and json_['map']:
            deserial['map'] = json_['map']

        if json_['json_'] == 'webapi':
            for attr in ('environment', 'outputters', 'weatherers', 'movers',
                         'spills'):
                if attr in json_ and json_[attr]:
                    deserial[attr] = cls.deserialize_oc(json_[attr])

        return deserial

    @classmethod
    def deserialize_oc(cls, json_):
        '''
        check the contents of ordered collections to figure out what schema
        to use.
        Basically, the json-serialized ordered collection looks like a
        regular list.
        '''
        deserial = []
        for item in json_:
            fqn = item['obj_type']
            name, scope = (list(reversed(fqn.rsplit('.', 1)))
                           if fqn.find('.') >= 0
                           else [fqn, ''])
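            # e.g. (hypothetical path) an obj_type of 'gnome.movers.SomeMover'
            # splits into scope 'gnome.movers' and name 'SomeMover', which is
            # imported below so its deserialize() classmethod can be called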
            my_module = __import__(scope, globals(), locals(), [str(name)], -1)
            py_class = getattr(my_module, name)

            deserial.append(py_class.deserialize(item))

        return deserial

    @classmethod
    def load(cls, saveloc, json_data, references=None):
        '''
        Load a model instance from a save location, given its json metadata
        '''
        references = references if references is not None else References()

        # model has no datafiles or 'save_reference' attributes so no need to
        # do anything special for it. But let's add this as a check anyway
        datafiles = cls._state.get_field_by_attribute('isdatafile')
        ref_fields = cls._state.get_field_by_attribute('save_reference')
        if (datafiles or ref_fields):
            raise Exception("Model.load() assumes none of the attributes "
                "in the _state are marked 'isdatafile' or 'save_reference'. "
                "If this changes, then we need to make this more robust.")

        # deserialize after removing references
        _to_dict = cls.deserialize(json_data)

        # load nested map object and add it - currently, 'load' is only used
        # for loading save files/location files, so it assumes:
        # json_data['json_'] == 'save'
        if ('map' in json_data):
            map_obj = eval(json_data['map']['obj_type']).load(saveloc,
                json_data['map'], references)
            _to_dict['map'] = map_obj

        # load collections
        for oc in cls._oc_list:
            if oc in _to_dict:
                _to_dict[oc] = cls._load_collection(saveloc, _to_dict[oc],
                    references)
        for spill in ['spills', 'uncertain_spills']:
            if spill in _to_dict:
                _to_dict[spill] = cls._load_collection(saveloc,
                                                       _to_dict[spill],
                                                       references)
            # also need to load spill data for mid-run save!

        model = cls.new_from_dict(_to_dict)

        model._load_spill_data(os.path.join(saveloc, 'spills_data_arrays.nc'))

        return model

    @classmethod
    def _load_collection(cls, saveloc, l_coll_dict, refs):
        '''
        doesn't need to be a classmethod of the Model, but it's only used by
        Model at present
        '''
        l_coll = []
        for item in l_coll_dict:
            i_ref = item['id']
            if refs.retrieve(i_ref):
                l_coll.append(refs.retrieve(i_ref))
            else:
                f_name = os.path.join(saveloc, item['id'])
                obj = load(f_name, refs)    # will add obj to refs
                l_coll.append(obj)
        return l_coll
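
# A minimal usage sketch (not part of the source): build a Model, add a
# spill, and step it to completion.  The import paths and the spill helper's
# arguments are assumed from the surrounding examples and may need adjusting
# for the installed gnome package.
from datetime import datetime, timedelta

from gnome.model import Model                        # assumed import path
from gnome.spill import point_line_release_spill     # assumed import path

model = Model(start_time=datetime(2013, 2, 13),
              duration=timedelta(days=2),
              time_step=timedelta(minutes=15),
              uncertain=True,
              cache_enabled=True)
model.spills += point_line_release_spill(1000,
                                         (0., 0., 0.),
                                         model.start_time)

for step_info in model:     # __iter__ rewinds; each iteration calls step()
    print step_info

# or equivalently: output = model.full_run(log=True)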