Example 1
    def generate(self, py_ast, funcvars):
        # Replace occurrences of intrinsic objects in Python AST
        transformer = IntrinsicTransformer(self.fieldset, self.ptype)
        py_ast = transformer.visit(py_ast)

        # Untangle Pythonic tuple-assignment statements
        py_ast = TupleSplitter().visit(py_ast)

        # Generate C-code for all nodes in the Python AST
        self.visit(py_ast)
        self.ccode = py_ast.ccode

        # Insert variable declarations for non-intrinsics
        # Make sure that repeated variables are not declared more than
        # once. If variables occur in multiple Kernels, give a warning
        used_vars = []
        for kvar in list(funcvars):  # iterate over a copy; funcvars is mutated below
            if kvar in used_vars:
                logger.warning(kvar + " declared in multiple Kernels")
                funcvars.remove(kvar)
            else:
                used_vars.append(kvar)
        for kvar in self.kernel_vars + self.array_vars:
            if kvar in funcvars:
                funcvars.remove(kvar)
        self.ccode.body.insert(0, c.Value('ErrorCode', 'err'))
        if len(funcvars) > 0:
            self.ccode.body.insert(0, c.Value("float", ", ".join(funcvars)))
        if len(transformer.tmp_vars) > 0:
            self.ccode.body.insert(0, c.Value("float", ", ".join(transformer.tmp_vars)))

        return self.ccode
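The switch to `list(funcvars)` above matters: Python skips elements when a list is mutated while it is being iterated, which is exactly the deduplication this block performs. A minimal standalone sketch of the pitfall (plain Python, illustrative names):

names = ['u', 'u', 'v']
for n in names:          # BUG: mutating the list being iterated
    if n == 'u':
        names.remove(n)
print(names)             # ['u', 'v'] -- the second 'u' was skipped

names = ['u', 'u', 'v']
for n in list(names):    # iterate over a snapshot instead
    if n == 'u':
        names.remove(n)
print(names)             # ['v']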
Example 2
    def __init__(self, fieldset, ptype, pyfunc=None, funcname=None,
                 funccode=None, py_ast=None, funcvars=None):
        self.fieldset = fieldset
        self.ptype = ptype

        # Derive meta information from pyfunc, if not given
        self.funcname = funcname or pyfunc.__name__
        if pyfunc is AdvectionRK4_3D:
            logger.info('Note that positive vertical velocity is assumed DOWNWARD by AdvectionRK4_3D')
        if funcvars is not None:
            self.funcvars = funcvars
        elif hasattr(pyfunc, '__code__'):
            self.funcvars = list(pyfunc.__code__.co_varnames)
        else:
            self.funcvars = None
        self.funccode = funccode or inspect.getsource(pyfunc.__code__)
        # Parse AST if it is not provided explicitly
        self.py_ast = py_ast or parse(fix_indentation(self.funccode)).body[0]
        if pyfunc is None:
            # Extract user context by inspecting the call stack
            stack = inspect.stack()
            try:
                user_ctx = stack[-1][0].f_globals
                user_ctx['math'] = globals()['math']
                user_ctx['random'] = globals()['random']
                user_ctx['ErrorCode'] = globals()['ErrorCode']
            except Exception:
                logger.warning("Could not access user context when merging kernels")
                user_ctx = globals()
            finally:
                del stack  # Remove cyclic references
            # Compile and generate Python function from AST
            py_mod = Module(body=[self.py_ast])
            exec(compile(py_mod, "<ast>", "exec"), user_ctx)
            self.pyfunc = user_ctx[self.funcname]
        else:
            self.pyfunc = pyfunc
        self.name = "%s%s" % (ptype.name, self.funcname)

        # Generate the kernel function and add the outer loop
        if self.ptype.uses_jit:
            kernelgen = KernelGenerator(fieldset, ptype)
            kernel_ccode = kernelgen.generate(deepcopy(self.py_ast),
                                              self.funcvars)
            self.field_args = kernelgen.field_args
            self.const_args = kernelgen.const_args
            loopgen = LoopGenerator(fieldset, ptype)
            self.ccode = loopgen.generate(self.funcname, self.field_args, self.const_args,
                                          kernel_ccode)

            basename = path.join(get_cache_dir(), self._cache_key)
            self.src_file = "%s.c" % basename
            self.lib_file = "%s.%s" % (basename, 'dll' if platform == 'win32' else 'so')
            self.log_file = "%s.log" % basename
        self._lib = None
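Example 2's `pyfunc is None` branch rebuilds a callable from its AST. The same round trip can be sketched with the standard library alone; the kernel below is illustrative, and `type_ignores=[]` is the Python 3.8+ spelling of the bare `Module(body=[...])` construction used above:

import ast
import inspect
import textwrap

def sample_kernel(particle, fieldset, time):
    particle.lon += 0.1

# Recover the source, re-parse it, and rebuild a module around the FunctionDef
src = textwrap.dedent(inspect.getsource(sample_kernel))
func_def = ast.parse(src).body[0]
module = ast.Module(body=[func_def], type_ignores=[])

namespace = {}
exec(compile(module, "<ast>", "exec"), namespace)
rebuilt = namespace[func_def.name]
print(rebuilt is sample_kernel)  # False: a fresh function object with the same behaviour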
Example 3
    def write(self, pset, time, sync=True):
        """Write :class:`parcels.particleset.ParticleSet` data to file

        :param pset: ParticleSet object to write
        :param time: Time at which to write ParticleSet
        :param sync: Optional argument whether to write data to disk immediately. Default is True

        """
        if isinstance(time, delta):
            time = time.total_seconds()
        if self.lasttime_written != time:  # only write if 'time' hasn't been written yet
            self.lasttime_written = time
            if self.type == 'array':
                # Check if largest particle ID is smaller than the last ID in ParticleFile.
                # Otherwise, new particles have been added and netcdf will fail
                if pset.size > 0:
                    if max([p.id for p in pset]) > self.id[-1]:
                        logger.error(
                            "Number of particles appears to increase. Use type='indexed' for ParticleFile"
                        )

                    # Finds the indices (inds) of the particle IDs in the ParticleFile,
                    # because particles can have been deleted
                    pids = [p.id for p in pset]
                    inds = np.in1d(self.id[:], pids, assume_unique=True)
                    inds = np.arange(len(self.id[:]))[inds]

                    self.time[inds, self.idx] = time
                    self.lat[inds, self.idx] = np.array([p.lat for p in pset])
                    self.lon[inds, self.idx] = np.array([p.lon for p in pset])
                    self.z[inds, self.idx] = np.array([p.depth for p in pset])
                    for var in self.user_vars:
                        getattr(self, var)[inds, self.idx] = np.array(
                            [getattr(p, var) for p in pset])
                else:
                    logger.warning("ParticleSet is empty on writing as array")

                self.idx += 1
            elif self.type == 'indexed':
                ind = np.arange(pset.size) + self.idx
                self.id[ind] = np.array([p.id for p in pset])
                self.time[ind] = time
                self.lat[ind] = np.array([p.lat for p in pset])
                self.lon[ind] = np.array([p.lon for p in pset])
                self.z[ind] = np.array([p.depth for p in pset])
                for var in self.user_vars:
                    getattr(self, var)[ind] = np.array(
                        [getattr(p, var) for p in pset])

                self.idx += pset.size

        if sync:
            self.sync()
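The `np.in1d` step is the heart of the 'array' write path: it maps the IDs of the particles still alive onto row indices of the file's fixed-size arrays, so deleted particles simply keep their NaN fill values. A standalone numpy sketch with illustrative values:

import numpy as np

file_ids = np.array([0, 1, 2, 3, 4])   # IDs as stored in the ParticleFile
pids = [0, 2, 4]                       # IDs still alive in the ParticleSet

mask = np.in1d(file_ids, pids, assume_unique=True)
inds = np.arange(len(file_ids))[mask]
print(inds)                            # [0 2 4] -- rows to write this timestep

lats = np.array([10.0, 12.0, 14.0])
table = np.full((5, 3), np.nan)        # (trajectory, obs) style storage
table[inds, 0] = lats                  # write one observation column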
Example 4
    def write(self, pset, time, sync=True, deleted_only=False):
        """Write :class:`parcels.particleset.ParticleSet` data to file

        :param pset: ParticleSet object to write
        :param time: Time at which to write ParticleSet
        :param sync: Optional argument whether to write data to disk immediately. Default is True
        :param deleted_only: Optional argument to write only deleted particles. Default is False

        """
        if self.dataset is None:
            self.open_dataset()
        if isinstance(time, delta):
            time = time.total_seconds()
        if self.lasttime_written != time and \
           (self.write_ondelete is False or deleted_only is True):
            if pset.size > 0:

                first_write = [
                    p for p in pset
                    if (p.fileid < 0 or len(self.idx) == 0)
                    and p.dt * p.time <= p.dt * time
                ]  # len(self.idx)==0 in case pset is written to new ParticleFile
                for p in first_write:
                    p.fileid = self.lasttraj
                    self.lasttraj += 1

                # keep integer dtype: self.idx entries are used as column indices below
                self.idx = np.append(self.idx, np.zeros(len(first_write))).astype(int)

                for p in pset:
                    if p.dt * p.time <= p.dt * time:  # don't write particles if they haven't started yet
                        i = p.fileid
                        self.id[i, self.idx[i]] = p.id
                        self.time[i, self.idx[i]] = time
                        self.lat[i, self.idx[i]] = p.lat
                        self.lon[i, self.idx[i]] = p.lon
                        self.z[i, self.idx[i]] = p.depth
                        for var in self.user_vars:
                            getattr(self, var)[i, self.idx[i]] = getattr(p, var)
                for p in first_write:
                    for var in self.user_vars_once:
                        getattr(self, var)[p.fileid] = getattr(p, var)
            else:
                logger.warning("ParticleSet is empty on writing as array")

            if not deleted_only:
                self.idx += 1
                self.lasttime_written = time

        if sync:
            self.sync()
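The predicate `p.dt * p.time <= p.dt * time` deserves a second look: multiplying both sides by `dt` flips the comparison for backward runs, so one expression covers both integration directions. A quick standalone check (toy numbers):

def has_started(p_time, dt, now):
    """True once a particle's clock has reached 'now' in the direction of dt."""
    return dt * p_time <= dt * now

# Forward run: a particle released at t=100 must not be written at t=50
print(has_started(100.0, dt=60.0, now=50.0))    # False
print(has_started(100.0, dt=60.0, now=100.0))   # True

# Backward run (dt < 0): time decreases, so t=100 has already "started" at now=50
print(has_started(50.0, dt=-60.0, now=100.0))   # False: still ahead of now
print(has_started(100.0, dt=-60.0, now=50.0))   # True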
Example 5
    def write(self, pset, time, sync=True, deleted_only=False):
        """Write :class:`parcels.particleset.ParticleSet` data to file

        :param pset: ParticleSet object to write
        :param time: Time at which to write ParticleSet
        :param sync: Optional argument whether to write data to disk immediately. Default is True
        :param deleted_only: Optional argument to write only deleted particles. Default is False

        """
        if isinstance(time, delta):
            time = time.total_seconds()
        if self.lasttime_written != time and \
           (self.write_ondelete is False or deleted_only is True):
            self.lasttime_written = time
            if pset.size > 0:

                first_write = [p for p in pset if p.fileid < 0]
                for p in first_write:
                    p.fileid = self.lasttraj
                    self.lasttraj += 1

                inds = [p.fileid for p in pset]

                self.id[inds, self.idx] = [p.id for p in pset]
                self.time[inds, self.idx] = time
                self.lat[inds, self.idx] = np.array([p.lat for p in pset])
                self.lon[inds, self.idx] = np.array([p.lon for p in pset])
                self.z[inds, self.idx] = np.array([p.depth for p in pset])
                for var in self.user_vars:
                    getattr(self, var)[inds, self.idx] = np.array(
                        [getattr(p, var) for p in pset])
                if first_write:  # 'once' variables are only written for newly added particles
                    for var in self.user_vars_once:
                        vals = [getattr(p, var) for p in first_write]
                        newinds = [p.fileid for p in first_write]
                        getattr(self, var)[newinds] = np.array(vals)
            else:
                logger.warning("ParticleSet is empty on writing as array")

            self.idx += 1

        if sync:
            self.sync()
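Example 5 writes an entire timestep per variable with a single fancy-indexed assignment: rows come from each particle's `fileid`, the column is the shared observation counter `self.idx`. The numpy pattern in isolation (shapes and values illustrative):

import numpy as np

lat = np.full((4, 3), np.nan)      # (trajectory, obs) storage
inds = [0, 2, 3]                   # fileids of the particles still alive
idx = 1                            # current observation column

lat[inds, idx] = np.array([51.0, 52.5, 53.1])
print(lat)
# column 1 now holds the three latitudes; row 1 stays NaN (deleted particle)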
Example 6
    def execute(self, pyfunc=AdvectionRK4, starttime=None, endtime=None, dt=1.,
                runtime=None, interval=None, recovery=None, output_file=None,
                show_movie=False):
        """Execute a given kernel function over the particle set for
        multiple timesteps. Optionally also provide sub-timestepping
        for particle output.

        :param pyfunc: Kernel function to execute. This can be the name of a
                       defined Python function or a :class:`parcels.kernel.Kernel` object.
                       Kernels can be concatenated using the + operator
        :param starttime: Starting time for the timestepping loop. Defaults to the
                          first (dt > 0) or last (dt < 0) time in fieldset.U.time
        :param endtime: End time for the timestepping loop
        :param runtime: Length of the timestepping loop. Use instead of endtime.
        :param dt: Timestep interval to be passed to the kernel
        :param interval: Interval for inner sub-timestepping (leap), which dictates
                         the update frequency of file output and animation.
        :param output_file: :mod:`parcels.particlefile.ParticleFile` object for particle output
        :param recovery: Dictionary with additional :mod:`parcels.kernels.error`
                         recovery kernels to allow custom recovery behaviour in case of
                         kernel errors.
        :param show_movie: True shows particles; the name of a field plots that field as background
        """

        # check if pyfunc has changed since last compile. If so, recompile
        if self.kernel is None or (self.kernel.pyfunc is not pyfunc and self.kernel is not pyfunc):
            # Generate and store Kernel
            if isinstance(pyfunc, Kernel):
                self.kernel = pyfunc
            else:
                self.kernel = self.Kernel(pyfunc)
            # Prepare JIT kernel execution
            if self.ptype.uses_jit:
                self.kernel.remove_lib()
                self.kernel.compile(compiler=GNUCompiler())
                self.kernel.load_lib()

        # Convert all time variables to seconds
        if isinstance(starttime, delta):
            starttime = starttime.total_seconds()
        if isinstance(endtime, delta):
            endtime = endtime.total_seconds()
        if isinstance(runtime, delta):
            runtime = runtime.total_seconds()
        if isinstance(dt, delta):
            dt = dt.total_seconds()
        if isinstance(interval, delta):
            interval = interval.total_seconds()
        if isinstance(starttime, datetime):
            starttime = (starttime - self.time_origin).total_seconds()
        if isinstance(endtime, datetime):
            endtime = (endtime - self.time_origin).total_seconds()

        # Derive starttime, endtime and interval from arguments or fieldset defaults
        if runtime is not None and endtime is not None:
            raise RuntimeError('Only one of (endtime, runtime) can be specified')
        if starttime is None:
            starttime = self.fieldset.U.time[0] if dt > 0 else self.fieldset.U.time[-1]
        if runtime is not None:
            if runtime < 0:
                runtime = np.abs(runtime)
                logger.warning("Negating runtime because it has to be positive")
            endtime = starttime + runtime * np.sign(dt)
        else:
            if endtime is None:
                endtime = self.fieldset.U.time[-1] if dt > 0 else self.fieldset.U.time[0]
        if interval is None:
            interval = endtime - starttime

        # Ensure that dt and interval have the correct sign
        if endtime > starttime:  # Time-forward mode
            if dt < 0:
                dt *= -1.
                logger.warning("Negating dt because running in time-forward mode")
            if interval < 0:
                interval *= -1.
                logger.warning("Negating interval because running in time-forward mode")
        if endtime < starttime:  # Time-backward mode
            if dt > 0.:
                dt *= -1.
                logger.warning("Negating dt because running in time-backward mode")
            if interval > 0.:
                interval *= -1.
                logger.warning("Negating interval because running in time-backward mode")

        # Initialise particle timestepping
        for p in self:
            p.time = starttime
            p.dt = dt
        # Execute time loop in sub-steps (timeleaps)
        timeleaps = int((endtime - starttime) / interval)
        assert timeleaps >= 0
        leaptime = starttime
        for _ in range(timeleaps):
            # First write output_file, because particles could have been added
            if output_file:
                output_file.write(self, leaptime)
            if show_movie:
                self.show(field=show_movie, show_time=leaptime)
            leaptime += interval
            self.kernel.execute(self, endtime=leaptime, dt=dt,
                                recovery=recovery)
        # Write out a final output_file
        if output_file:
            output_file.write(self, leaptime)
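The time loop advances in "timeleaps" of length `interval`: output is written first (particles may have been added), then the kernel is executed up to the next leap boundary. The scheduling arithmetic on its own, with a toy stand-in for the output file:

starttime, endtime, interval = 0.0, 86400.0, 21600.0   # one day in 6-hour leaps

def write_output(time):
    print("writing output at t = %g s" % time)

timeleaps = int((endtime - starttime) / interval)       # 4 leaps
leaptime = starttime
for _ in range(timeleaps):
    write_output(leaptime)           # write first: particles may have been added
    leaptime += interval
    # kernel.execute(..., endtime=leaptime, dt=dt) would run here
write_output(leaptime)               # final state at t = endtime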
Example 7
    def execute(self,
                pyfunc=AdvectionRK4,
                endtime=None,
                runtime=None,
                dt=1.,
                interval=None,
                recovery=None,
                output_file=None,
                show_movie=False):
        """Execute a given kernel function over the particle set for
        multiple timesteps. Optionally also provide sub-timestepping
        for particle output.

        :param pyfunc: Kernel function to execute. This can be the name of a
                       defined Python function or a :class:`parcels.kernel.Kernel` object.
                       Kernels can be concatenated using the + operator
        :param endtime: End time for the timestepping loop
        :param runtime: Length of the timestepping loop. Use instead of endtime.
        :param dt: Timestep interval to be passed to the kernel
        :param interval: Interval for inner sub-timestepping (leap), which dictates
                         the update frequency of file output and animation.
        :param output_file: :mod:`parcels.particlefile.ParticleFile` object for particle output
        :param recovery: Dictionary with additional :mod:`parcels.kernels.error`
                         recovery kernels to allow custom recovery behaviour in case of
                         kernel errors.
        :param show_movie: True shows particles; the name of a field plots that field as background
        """

        # check if pyfunc has changed since last compile. If so, recompile
        if self.kernel is None or (self.kernel.pyfunc is not pyfunc
                                   and self.kernel is not pyfunc):
            # Generate and store Kernel
            if isinstance(pyfunc, Kernel):
                self.kernel = pyfunc
            else:
                self.kernel = self.Kernel(pyfunc)
            # Prepare JIT kernel execution
            if self.ptype.uses_jit:
                self.kernel.remove_lib()
                self.kernel.compile(compiler=GNUCompiler())
                self.kernel.load_lib()

        # Convert all time variables to seconds
        if isinstance(endtime, delta):
            endtime = endtime.total_seconds()
        elif isinstance(endtime, datetime):
            endtime = (endtime - self.time_origin).total_seconds()
        if isinstance(runtime, delta):
            runtime = runtime.total_seconds()
        if isinstance(dt, delta):
            dt = dt.total_seconds()
        if isinstance(interval, delta):
            interval = interval.total_seconds()

        # Set particle.time defaults based on sign of dt, if not set at ParticleSet construction
        for p in self:
            if np.isnan(p.time):
                p.time = self.fieldset.U.grid.time[0] if dt >= 0 else self.fieldset.U.grid.time[-1]

        # Derive _starttime, endtime and interval from arguments or fieldset defaults
        if runtime is not None and endtime is not None:
            raise RuntimeError(
                'Only one of (endtime, runtime) can be specified')
        _starttime = min([p.time for p in self]) if dt >= 0 else max([p.time for p in self])
        if self.repeatdt is not None and self.repeat_starttime is None:
            self.repeat_starttime = _starttime
        if runtime is not None:
            if runtime < 0:
                runtime = np.abs(runtime)
                logger.warning(
                    "Negating runtime because it has to be positive")
            endtime = _starttime + runtime * np.sign(dt)
        elif endtime is None:
            endtime = self.fieldset.U.grid.time[-1] if dt >= 0 else self.fieldset.U.grid.time[0]
        if interval is None:
            interval = endtime - _starttime
        elif dt < 0 and interval > 0.:
            interval *= -1.
            logger.warning(
                "Negating interval because running in time-backward mode")

        if abs(endtime - _starttime) < 1e-5 or interval == 0 or dt == 0 or runtime == 0:
            timeleaps = 1
            dt = 0
            runtime = 0
            endtime = _starttime
            logger.warning_once(
                "dt or runtime are zero, or endtime is equal to Particle.time. "
                "The kernels will be executed once, without incrementing time")
        else:
            timeleaps = int((endtime - _starttime) / interval)

        if self.repeatdt is not None and self.repeatdt % interval != 0:
            raise ValueError("repeatdt should be a multiple of interval")

        # Initialise particle timestepping
        for p in self:
            p.dt = dt
            # set dt_initial to the original dt
            p.dt_initial = dt
        # Execute time loop in sub-steps (timeleaps)
        assert timeleaps >= 0
        leaptime = _starttime
        for _ in range(timeleaps):
            # First write output_file, because particles could have been added
            if output_file:
                output_file.write(self, leaptime)
            if show_movie:
                self.show(field=show_movie, show_time=leaptime)
            leaptime += interval
            self.kernel.execute(self,
                                endtime=leaptime,
                                dt=dt,
                                recovery=recovery)
            # Add new particles if repeatdt is used
            if self.repeatdt is not None and abs(leaptime - self.repeat_starttime) % self.repeatdt == 0:
                self.add(
                    ParticleSet(fieldset=self.fieldset,
                                time=leaptime,
                                lon=self.repeatlon,
                                lat=self.repeatlat,
                                depth=self.repeatdepth,
                                pclass=self.repeatpclass))
        # Write out a final output_file
        if output_file:
            output_file.write(self, leaptime)
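Repeated releases in Example 7 fire whenever `abs(leaptime - self.repeat_starttime) % self.repeatdt == 0`; since `leaptime` only ever advances by `interval`, the modulus can only hit zero when `repeatdt` is a multiple of `interval`, which is what the ValueError above enforces. A toy trace (illustrative times in seconds):

repeat_starttime, repeatdt, interval = 0.0, 43200.0, 21600.0  # release every 2nd leap

leaptime = repeat_starttime
for leap in range(5):
    leaptime += interval
    if abs(leaptime - repeat_starttime) % repeatdt == 0:
        print("leap %d (t = %g s): release new particles" % (leap, leaptime))
# leaps 1 and 3 trigger (t = 43200 and 86400 s)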
Example 8
    def show(self,
             particles=True,
             show_time=None,
             field=None,
             domain=None,
             land=False,
             vmin=None,
             vmax=None,
             savefile=None):
        """Method to 'show' a Parcels ParticleSet

        :param particles: Boolean whether to show particles
        :param show_time: Time at which to show the ParticleSet
        :param field: Field to plot under particles (either None, a Field object, or 'vector')
        :param domain: Four-vector (latN, latS, lonE, lonW) defining domain to show
        :param land: Boolean whether to show land (in field='vector' mode only)
        :param vmin: minimum colour scale (only in single-plot mode)
        :param vmax: maximum colour scale (only in single-plot mode)
        :param savefile: Name of a file to save the plot to
        """
        try:
            import matplotlib.pyplot as plt
        except ImportError:
            logger.info("Visualisation is not possible. Matplotlib not found.")
            return
        try:
            from mpl_toolkits.basemap import Basemap
        except ImportError:
            Basemap = None

        plon = np.array([p.lon for p in self])
        plat = np.array([p.lat for p in self])
        show_time = self[0].time if show_time is None else show_time
        if isinstance(show_time, datetime):
            show_time = np.datetime64(show_time)
        if isinstance(show_time, np.datetime64):
            if not self.time_origin:
                raise NotImplementedError(
                    'If fieldset.U.grid.time_origin is not a date, showtime cannot be a date in particleset.show()'
                )
            show_time = (show_time - self.time_origin) / np.timedelta64(1, 's')
        if isinstance(show_time, delta):
            show_time = show_time.total_seconds()
        if np.isnan(show_time):
            show_time = self.fieldset.U.grid.time[0]
        if domain is not None:

            def nearest_index(array, value):
                """returns index of the nearest value in array using O(log n) bisection method"""
                y = bisect.bisect(array, value)
                if y == len(array):
                    return y - 1
                elif (abs(array[y - 1] - value) < abs(array[y] - value)):
                    return y - 1
                else:
                    return y

            latN = nearest_index(self.fieldset.U.lat, domain[0])
            latS = nearest_index(self.fieldset.U.lat, domain[1])
            lonE = nearest_index(self.fieldset.U.lon, domain[2])
            lonW = nearest_index(self.fieldset.U.lon, domain[3])
        else:
            latN, latS, lonE, lonW = (-1, 0, -1, 0)
        if field != 'vector' and not land:
            plt.ion()
            plt.clf()
            if particles:
                plt.plot(np.transpose(plon), np.transpose(plat), 'ko')
            if field is None:
                axes = plt.gca()
                axes.set_xlim(
                    [self.fieldset.U.lon[lonW], self.fieldset.U.lon[lonE]])
                axes.set_ylim(
                    [self.fieldset.U.lat[latS], self.fieldset.U.lat[latN]])
            else:
                if not isinstance(field, Field):
                    field = getattr(self.fieldset, field)
                field.show(with_particles=True,
                           show_time=show_time,
                           vmin=vmin,
                           vmax=vmax)
            xlbl = 'Zonal distance [m]' if type(self.fieldset.U.units) is UnitConverter else 'Longitude [degrees]'
            ylbl = 'Meridional distance [m]' if type(self.fieldset.U.units) is UnitConverter else 'Latitude [degrees]'
            plt.xlabel(xlbl)
            plt.ylabel(ylbl)
        elif Basemap is None:
            logger.info("Visualisation is not possible. Basemap not found.")
        else:
            self.fieldset.computeTimeChunk(show_time, 1)
            (idx, periods) = self.fieldset.U.time_index(show_time)
            show_time -= periods * (self.fieldset.U.time[-1] -
                                    self.fieldset.U.time[0])
            lon = self.fieldset.U.lon
            lat = self.fieldset.U.lat
            lon = lon[lonW:lonE]
            lat = lat[latS:latN]

            # configuring plot
            lat_median = np.median(lat)
            lon_median = np.median(lon)
            plt.figure()
            m = Basemap(projection='merc',
                        lat_0=lat_median,
                        lon_0=lon_median,
                        resolution='h',
                        area_thresh=100,
                        llcrnrlon=lon[0],
                        llcrnrlat=lat[0],
                        urcrnrlon=lon[-1],
                        urcrnrlat=lat[-1])
            parallels = np.arange(lat[0], lat[-1], abs(lat[0] - lat[-1]) / 5)
            parallels = np.around(parallels, 2)
            m.drawparallels(parallels, labels=[1, 0, 0, 0])
            meridians = np.arange(lon[0], lon[-1], abs(lon[0] - lon[-1]) / 5)
            meridians = np.around(meridians, 2)
            m.drawmeridians(meridians, labels=[0, 0, 0, 1])
            if land:
                m.drawcoastlines()
                m.fillcontinents(color='burlywood')
            if field == 'vector':
                # formatting velocity data for quiver plotting
                U = np.array(
                    self.fieldset.U.temporal_interpolate_fullfield(
                        idx, show_time))
                V = np.array(
                    self.fieldset.V.temporal_interpolate_fullfield(
                        idx, show_time))
                U = U[latS:latN, lonW:lonE]
                V = V[latS:latN, lonW:lonE]
                U = np.array([
                    U[y, x] for x in range(len(lon)) for y in range(len(lat))
                ])
                V = np.array([
                    V[y, x] for x in range(len(lon)) for y in range(len(lat))
                ])
                speed = np.sqrt(U**2 + V**2)
                normU = U / speed
                normV = V / speed
                x = np.repeat(lon, len(lat))
                y = np.tile(lat, len(lon))

                # plotting velocity vector field
                vecs = m.quiver(x,
                                y,
                                normU,
                                normV,
                                speed,
                                cmap=plt.cm.gist_ncar,
                                clim=[vmin, vmax],
                                scale=50,
                                latlon=True)
                m.colorbar(vecs, "right", size="5%", pad="2%")
            elif field is not None:
                logger.warning(
                    'Plotting of both a field and land=True is not supported in this version of Parcels'
                )
            # plotting particle data
            if particles:
                xs, ys = m(plon, plat)
                m.scatter(xs, ys, color='black')

        if not self.time_origin:
            timestr = ' after ' + str(delta(seconds=show_time)) + ' hours'
        else:
            date_str = str(self.time_origin +
                           np.timedelta64(int(show_time), 's'))
            timestr = ' on ' + date_str[:10] + ' ' + date_str[11:19]

        if particles:
            if field is None:
                plt.title('Particles' + timestr)
            elif field == 'vector':
                plt.title('Particles and velocity field' + timestr)
            else:
                plt.title('Particles and ' + field.name + timestr)
        else:
            if field == 'vector':
                plt.title('Velocity field' + timestr)
            else:
                plt.title(field.name + timestr)

        if savefile is None:
            plt.show()
            plt.pause(0.0001)
        else:
            plt.savefig(savefile)
            logger.info('Plot saved to ' + savefile + '.png')
            plt.close()
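`nearest_index` is the only non-plotting logic in `show`: a `bisect`-based nearest-neighbour lookup that assumes a sorted 1-D coordinate axis. It runs standalone; the latitude axis below is illustrative:

import bisect
import numpy as np

def nearest_index(array, value):
    """Index of the entry in a sorted array closest to 'value' (O(log n))."""
    y = bisect.bisect(array, value)
    if y == len(array):
        return y - 1
    elif abs(array[y - 1] - value) < abs(array[y] - value):
        return y - 1
    else:
        return y

lat = np.arange(-80.0, 80.01, 0.25)       # a typical regular latitude axis
print(nearest_index(lat, 52.37))          # index of the grid row nearest 52.37N
print(lat[nearest_index(lat, 52.37)])     # 52.25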
Example 9
    def __init__(self,
                 name,
                 particleset,
                 outputdt=np.inf,
                 type='array',
                 write_ondelete=False):

        self.type = type
        self.name = name
        self.write_ondelete = write_ondelete
        self.outputdt = outputdt
        if self.write_ondelete and self.type == 'array':
            logger.warning(
                'ParticleFile.write_ondelete=True requires type="indexed". Setting that option'
            )
            self.type = 'indexed'
        self.lasttime_written = None  # variable to check if time has been written already
        self.dataset = netCDF4.Dataset("%s.nc" % name, "w", format="NETCDF4")
        self.dataset.createDimension("obs", None)
        if self.type == 'array':
            self.dataset.createDimension("trajectory", particleset.size)
            coords = ("trajectory", "obs")
        elif self.type == 'indexed':
            coords = ("obs",)
        else:
            raise RuntimeError(
                "ParticleFile type must be either 'array' or 'indexed'")
        self.dataset.feature_type = "trajectory"
        self.dataset.Conventions = "CF-1.6/CF-1.7"
        self.dataset.ncei_template_version = "NCEI_NetCDF_Trajectory_Template_v2.0"

        # Create ID variable according to CF conventions
        if self.type == 'array':
            self.id = self.dataset.createVariable("trajectory", "i4",
                                                  ("trajectory", ))
            self.id.long_name = "Unique identifier for each particle"
            self.id.cf_role = "trajectory_id"
            self.id[:] = np.array([p.id for p in particleset])
        elif self.type == 'indexed':
            self.id = self.dataset.createVariable("trajectory", "i4",
                                                  ("obs", ))
            self.id.long_name = "index of trajectory this obs belongs to"

        # Create time, lat, lon and z variables according to CF conventions:
        self.time = self.dataset.createVariable("time",
                                                "f8",
                                                coords,
                                                fill_value=np.nan)
        self.time.long_name = ""
        self.time.standard_name = "time"
        if particleset.time_origin == 0:
            self.time.units = "seconds"
        else:
            self.time.units = "seconds since " + str(particleset.time_origin)
            self.time.calendar = "julian"
        self.time.axis = "T"

        self.lat = self.dataset.createVariable("lat",
                                               "f4",
                                               coords,
                                               fill_value=np.nan)
        self.lat.long_name = ""
        self.lat.standard_name = "latitude"
        self.lat.units = "degrees_north"
        self.lat.axis = "Y"

        self.lon = self.dataset.createVariable("lon",
                                               "f4",
                                               coords,
                                               fill_value=np.nan)
        self.lon.long_name = ""
        self.lon.standard_name = "longitude"
        self.lon.units = "degrees_east"
        self.lon.axis = "X"

        self.z = self.dataset.createVariable("z",
                                             "f4",
                                             coords,
                                             fill_value=np.nan)
        self.z.long_name = ""
        self.z.standard_name = "depth"
        self.z.units = "m"
        self.z.positive = "down"

        self.user_vars = []
        self.user_vars_once = []
        """
        :user_vars: list of additional user defined particle variables to write for all particles and all times
        :user_vars_once: list of additional user defined particle variables to write for all particles only once at initial time. Only fully functional for type='array'
        """

        for v in particleset.ptype.variables:
            if v.name in ['time', 'lat', 'lon', 'depth', 'z', 'id']:
                continue
            if v.to_write:
                if v.to_write is True:
                    setattr(
                        self, v.name,
                        self.dataset.createVariable(v.name,
                                                    "f4",
                                                    coords,
                                                    fill_value=np.nan))
                    self.user_vars += [v.name]
                elif v.to_write == 'once':
                    setattr(
                        self, v.name,
                        self.dataset.createVariable(v.name,
                                                    "f4",
                                                    "trajectory",
                                                    fill_value=np.nan))
                    self.user_vars_once += [v.name]
                getattr(self, v.name).long_name = ""
                getattr(self, v.name).standard_name = v.name
                getattr(self, v.name).units = "unknown"

        self.idx = 0
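The constructor lays the file out per the NCEI trajectory template: an unlimited `obs` dimension, a fixed `trajectory` dimension for type='array', and one NaN-filled float variable per quantity. A minimal standalone sketch of the same netCDF4 calls (file name and sizes are illustrative):

import netCDF4
import numpy as np

dataset = netCDF4.Dataset("demo_trajectories.nc", "w", format="NETCDF4")
dataset.createDimension("obs", None)          # unlimited: grows with each write
dataset.createDimension("trajectory", 10)     # fixed number of particles
coords = ("trajectory", "obs")

lon = dataset.createVariable("lon", "f4", coords, fill_value=np.nan)
lon.standard_name = "longitude"
lon.units = "degrees_east"

lon[:, 0] = np.linspace(-5.0, 5.0, 10)        # first observation of each particle
dataset.close()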