# Example 1
    def interp(self,
               timein,
               method='linear',
               timeformat='%Y%m%d.%H%M%S',
               axis=-1):
        """
        Interpolate the data onto an equally spaced time vector.

        Parameters
        ----------
        timein : tuple or sequence of datetime
            Either a 3-tuple ``(tstart, tend, dt)`` where tstart/tend are
            strings formatted per *timeformat* and dt is the step, or an
            explicit vector of datetime objects.
        method : str
            Interpolation kind passed to ``scipy.interpolate.interp1d``.
            Use ``'nearest'`` to preserve masking in gap regions.
        timeformat : str
            Format used to parse tstart/tend when *timein* is a tuple.
        axis : int
            Axis along which to interpolate multidimensional data.

        Returns
        -------
        tnew, output
            The new time vector and the interpolated values.
        """
        # Create the time vector. A (tstart, tend, dt) tuple is expanded via
        # TimeVector; if unpacking/construction fails, timein is assumed to
        # already be a datetime vector and dt is inferred from its spacing.
        try:
            tstart, tend, dt = timein[0], timein[1], timein[2]
            tnew = othertime.TimeVector(tstart,
                                        tend,
                                        dt,
                                        timeformat=timeformat)
        except Exception:
            # Was a bare "except:"; narrowed so KeyboardInterrupt/SystemExit
            # still propagate.
            tnew = timein
            dt = (tnew[1] - tnew[0]).total_seconds()

        if method == 'nearest':
            # Nearest neighbour doesn't use interp1d, which preserves the mask
            self._evenly_dist_data(dt)
            tnew, output = self.subset(tnew[0], tnew[-1])
        else:
            t = othertime.SecondsSince(tnew, basetime=self.basetime)
            if self.ndim > 1:
                # Interpolate multidimensional arrays without a mask
                tsec, y = self.tsec, self.y
            else:
                # Exclude masked (gap) points from the interpolant
                mask = ~self.y.mask
                tsec, y = self.tsec[mask], self.y[mask]

            # Single interp1d construction shared by both branches
            F = interpolate.interp1d(tsec,
                                     y,
                                     kind=method,
                                     axis=axis,
                                     bounds_error=False,
                                     fill_value=0)
            output = F(t)

        return tnew, output
# Example 2
 def getTime(self, timeinfo):
     """
     Set up the particle time-step information.

     timeinfo is a 3-tuple (tstart, tend, dt) where the first two entries
     are 'yyyymmdd.HHMMSS' strings and dt is the step in seconds.
     """
     self.dt = timeinfo[2]

     # Full track-time vector and its seconds-since representations
     self.time_track = othertime.TimeVector(
         timeinfo[0], timeinfo[1], timeinfo[2],
         timeformat='%Y%m%d.%H%M%S')
     self.time_track_sec = othertime.SecondsSince(self.time_track)
     self.time_sec = othertime.SecondsSince(self.time)

     # Sentinel value: no time index has been located yet
     self.time_index = -9999
# Example 3
    def __init__(self, x, y, z, timeinfo, tformat='%Y%m%d.%H%M%S', **kwargs):
        """
        Build a met-data object in 'create' mode.

        x, y, z are station coordinates; timeinfo is a 3-tuple
        (tstart, tend, dt) parsed with *tformat*.
        """
        metfile.__init__(self, mode='create')

        # Station coordinates
        self.x = x
        self.y = y
        self.z = z

        # Time vector plus its seconds-since representation for netCDF
        self.time = otime.TimeVector(
            timeinfo[0], timeinfo[1], timeinfo[2], timeformat=tformat)
        self.nctime = otime.SecondsSince(self.time)

        self.varnames = [
            'Uwind', 'Vwind', 'Tair', 'Pair', 'RH', 'rain', 'cloud'
        ]

        # Refresh every metdata container
        for name in self.varnames:
            self.updateMetData(self[name])
# Example 4
def getAllTime(stationID, vartype, timestart, timeend):
    """Wrapper function to grab data for extended time periods in chunks.

    The CO-OPS SOS service only serves ~4-day windows, so the requested
    period is split into 4-day chunks and the results are concatenated.

    Returns the accumulated data dict, or an empty list when no chunk
    returned usable data (preserves the original return convention).
    """
    # Chunk boundaries every 4 days (seconds)
    timeList = othertime.TimeVector(timestart,
                                    timeend,
                                    4 * 86400.,
                                    istimestr=False)

    data = None
    for t1s, t2s in zip(timeList[:-1], timeList[1:]):

        t1 = datetime.strftime(t1s, '%Y-%m-%dT%H:%M:%SZ')
        t2 = datetime.strftime(t2s, '%Y-%m-%dT%H:%M:%SZ')
        print('Extracting dates %s to %s' % (t1, t2))
        # generate the url string
        target_url = 'http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS?service=SOS&request=GetObservation&version=1.0.0&observedProperty=' + vartype + '&offering=urn:ioos:station:NOAA.NOS.CO-OPS:' + stationID + '&responseFormat=text%2Fcsv&eventTime=' + t1 + '/' + t2

        # Get the data
        datatmp = getObsfromURL(target_url)

        # Original code treated len(datatmp) <= 1 as "no data" — skip it
        if len(datatmp) <= 1:
            continue

        if data is None:
            # First successful chunk seeds the output dict
            data = datatmp
        else:
            # Append subsequent chunks field by field
            for vv in list(data.keys()):
                data[vv] += datatmp[vv]

    # Original returned [] via a bare try/except around an unbound name;
    # make that explicit instead.
    return data if data is not None else []
# Example 5
def runmpi(ncfile,
           outfile,
           tstart,
           tend,
           dt,
           dtout,
           x,
           y,
           z,
           agepoly=None,
           method='nearest',
           is3D=False):
    """
    Run the particle-tracking model in parallel with MPI.

    Parameters
    ----------
    ncfile : str
        Input SUNTANS netcdf file.
    outfile : str
        Output particle netcdf file (written by rank 0 only).
    tstart, tend : str
        Start/end times, format 'yyyymmdd.HHMMSS'.
    dt : float
        Particle-tracking time step (seconds).
    dtout : float
        Output interval (seconds); the run is split into chunks of this size.
    x, y, z : array
        Initial particle positions (significant on rank 0 only; scattered
        to all ranks).
    agepoly : optional
        Polygon used to compute particle age; age tracking is enabled when
        this is not None.
    method : str
        Interpolation method passed to SunTrack.
    is3D : bool
        Whether to track particles in 3D.
    """
    # Generate a list of (tstart, tend, dt) tuples, one per output interval
    timevec = othertime.TimeVector(tstart,
                                   tend,
                                   dtout,
                                   timeformat='%Y%m%d.%H%M%S')
    timevec_sec = othertime.SecondsSince(timevec)
    timeinfos = []
    for ii in range(timevec.shape[0] - 1):
        if ii == 0:
            timestart = datetime.strftime(timevec[ii], '%Y%m%d.%H%M%S')
        else:
            # Shift by one step so chunks don't re-run the boundary instant
            timestart = datetime.strftime(timevec[ii] + timedelta(seconds=dt),
                                          '%Y%m%d.%H%M%S')

        timeend = datetime.strftime(timevec[ii + 1], '%Y%m%d.%H%M%S')
        timeinfos.append((timestart, timeend, dt))

    # Initialise the particle tracking object (every rank builds its own;
    # the object does not survive comm.bcast — see note below)
    print('Initialising the particle tracking object on processor: %d...' %
          (comm.rank))
    sun = SunTrack(ncfile,
                   interp_method='mesh',
                   interp_meshmethod=method,
                   is3D=is3D)

    # Initialise the age values ("is not None" instead of "== None")
    if agepoly is not None:
        calcage = True
    else:
        calcage = False
        age = None
        agemax = None

    # On rank = 0 only
    if comm.rank == 0:
        n = int(x.shape[0])

        # Check if the number of processes divides evenly into the array length
        rem = np.mod(n, size)
        if rem == 0:
            # Length of each process's portion of the original vector.
            # Integer division: n / size is a float under Python 3 and
            # breaks np.zeros(local_n) on the worker ranks.
            local_n = np.array([n // size])
        else:
            print('Padding array with extra values...')
            nextra = size - rem
            xpad = np.zeros((nextra, ))
            x = np.hstack((x, xpad))
            y = np.hstack((y, xpad))
            z = np.hstack((z, xpad))

            n = n + nextra
            local_n = np.array([n // size])

        print('Size of original vector = %d\nSize of split vector = %d' %
              (n, local_n))

        if calcage:
            age = np.zeros_like(x)
            agemax = np.zeros_like(x)

        # Initialise the output netcdf file
        sun.initParticleNC(outfile, n, age=calcage)

    else:
        # Non-root ranks receive their share via Scatter below
        x = None
        y = None
        z = None
        local_n = np.array([0])
        if calcage:
            age = None
            agemax = None

    comm.Barrier()
    t_start = MPI.Wtime()
    # Broadcast the particle tracking object everywhere
    #sun = comm.bcast(sun, root=0) # !! Doesn't work for this object !!

    # Scatter the x,y,z locations up amongst all processors
    # communicate local array size to all processes
    comm.Bcast(local_n, root=0)

    # initialize the local particle arrays as numpy arrays
    x_local = np.zeros(local_n)
    y_local = np.zeros(local_n)
    z_local = np.zeros(local_n)
    if calcage:
        age_local = np.zeros(local_n)
        agemax_local = np.zeros(local_n)

    # divide up vectors
    comm.Scatter(x, x_local, root=0)
    comm.Scatter(y, y_local, root=0)
    comm.Scatter(z, z_local, root=0)
    if calcage:
        comm.Scatter(age, age_local, root=0)
        comm.Scatter(agemax, agemax_local, root=0)
    else:
        age_local = None
        agemax_local = None

    ###
    # Write out the initial location to netcdf
    if comm.rank == 0:
        sun.writeParticleNC(outfile,
                            x,
                            y,
                            z,
                            timevec_sec[0],
                            0,
                            age=age,
                            agemax=agemax)
    comm.Barrier()

    ###
    # ... Call the particle tracking module on each processor
    # (only rank 0 prints progress)
    for ii, timeinfo in enumerate(timeinfos):
        sun(x_local,
            y_local,
            z_local,
            timeinfo,
            agepoly=agepoly,
            age=age_local,
            agemax=agemax_local,
            verbose=(comm.rank == 0))

        # Send the particles back to their main array
        comm.Barrier()

        comm.Gather(sun.particles['X'], x, root=0)
        comm.Gather(sun.particles['Y'], y, root=0)
        comm.Gather(sun.particles['Z'], z, root=0)
        if calcage:
            comm.Gather(sun.particles['age'], age, root=0)
            comm.Gather(sun.particles['agemax'], agemax, root=0)

        # Write the output to a netcdf file
        if comm.rank == 0:
            sun.writeParticleNC(outfile,
                                x,
                                y,
                                z,
                                sun.time_track_sec[-1],
                                ii + 1,
                                age=age,
                                agemax=agemax)

        comm.Barrier()

    t_diff = MPI.Wtime() - t_start  ### Stop stopwatch ###
    if comm.rank == 0:
        print(78 * '=' + '\n' + 78 * '=')
        print('Completed particle tracking using %d cores in %6.2f seconds.' %
              (comm.size, t_diff))
        print(78 * '=' + '\n' + 78 * '=')