def get_UpcomingEvents(start, stop, colour):
    try:
        credentials = get_Credentials()
        http = credentials.authorize(httplib2.Http())
        service = discovery.build('calendar', 'v3', http=http)

        if stop <= start:
            start = 0
            stop = 1

        now = datetime.combine(date.today() + delta(days=start), datetime.min.time()).isoformat() + 'Z'
        next = datetime.combine(date.today() + delta(days=stop), datetime.min.time()).isoformat() + 'Z'



        eventResult = service.events().list(calendarId='primary', timeMin=now, timeMax=next).execute()
        events = eventResult.get('items',[])

        if not events:
            return False
        else:
            colorCode = Convert_Colour(colour)
            for i in events:
                if dict(i).get('colorId', None) == colorCode:
                    return True
            else:
                return False
    except:
        return None
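A hypothetical usage sketch for the helper above (get_Credentials and Convert_Colour are assumed to be defined elsewhere in the same module; 'Tomato' is just an illustrative colour name):

# Check whether any event in the next 7 days carries the given colour.
has_event = get_UpcomingEvents(0, 7, 'Tomato')
if has_event is None:
    print('Calendar lookup failed')
else:
    print('matching event found' if has_event else 'no matching event')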
Example #2
    def show(self, **kwargs):
        if plt is None:
            raise RuntimeError("Visualisation not possible: matplotlib not found!")

        field = kwargs.get('field', True)
        t = kwargs.get('t', 0)
        lon = [p.lon for p in self]
        lat = [p.lat for p in self]
        plt.ion()
        plt.clf()
        plt.plot(np.transpose(lon), np.transpose(lat), 'ko')
        if field is True:
            axes = plt.gca()
            axes.set_xlim([self.grid.U.lon[0], self.grid.U.lon[-1]])
            axes.set_ylim([self.grid.U.lat[0], self.grid.U.lat[-1]])
            namestr = ''
            time_origin = self.grid.U.time_origin
        else:
            if not isinstance(field, Field):
                field = getattr(self.grid, field)
            field.show(animation=True, **kwargs)
            namestr = ' on ' + field.name
            time_origin = field.time_origin
        if time_origin == 0:
            timestr = ' after ' + str(delta(seconds=t)) + ' hours'
        else:
            timestr = ' on ' + str(time_origin + delta(seconds=t))
        plt.xlabel('Longitude')
        plt.ylabel('Latitude')
        plt.title('Particles' + namestr + timestr)
        plt.show()
        plt.pause(0.0001)
Example #3
def stommel_example(npart=1, mode='jit', verbose=False, method=AdvectionRK4):

    grid = stommel_grid()
    filename = 'stommel'
    grid.write(filename)

    # Determine particle class according to mode
    ParticleClass = JITParticle if mode == 'jit' else ScipyParticle

    class MyParticle(ParticleClass):
        p = Variable('p', dtype=np.float32, initial=0.)
        p_start = Variable('p_start', dtype=np.float32, initial=0.)

    pset = grid.ParticleSet(size=npart, pclass=MyParticle,
                            start=(100, 5000), finish=(200, 5000))
    for particle in pset:
        particle.p_start = grid.P[0., particle.lon, particle.lat]

    if verbose:
        print("Initial particle positions:\n%s" % pset)

    # Execute for 50 days, with 5min timesteps and hourly output
    runtime = delta(days=50)
    dt = delta(minutes=5)
    interval = delta(hours=12)
    print("Stommel: Advecting %d particles for %s" % (npart, runtime))
    pset.execute(method + pset.Kernel(UpdateP), runtime=runtime, dt=dt, interval=interval,
                 output_file=pset.ParticleFile(name="StommelParticle"), show_movie=False)

    if verbose:
        print("Final particle positions:\n%s" % pset)

    return pset
Example #4
def test_meridionalflow_sperical(mode, xdim=100, ydim=200):
    """ Create uniform NORTHWARD flow on sperical earth and advect particles

    As flow is so simple, it can be directly compared to analytical solution
    """

    maxvel = 1.
    lon = np.linspace(-180, 180, xdim, dtype=np.float32)
    lat = np.linspace(-90, 90, ydim, dtype=np.float32)
    U = np.zeros([xdim, ydim])
    V = maxvel * np.ones([xdim, ydim])

    grid = Grid.from_data(np.array(U, dtype=np.float32), lon, lat,
                          np.array(V, dtype=np.float32), lon, lat)

    lonstart = [0, 45]
    latstart = [0, 45]
    endtime = delta(hours=24)
    pset = grid.ParticleSet(2, pclass=pclass(mode), lon=lonstart, lat=latstart)
    pset.execute(pset.Kernel(AdvectionRK4), endtime=endtime, dt=delta(hours=1))

    assert(pset[0].lat - (latstart[0] + endtime.total_seconds() * maxvel / 1852 / 60) < 1e-4)
    assert(pset[0].lon - lonstart[0] < 1e-4)
    assert(pset[1].lat - (latstart[1] + endtime.total_seconds() * maxvel / 1852 / 60) < 1e-4)
    assert(pset[1].lon - lonstart[1] < 1e-4)
Example #5
def test_zonalflow_sperical(mode, k_sample_p, xdim=100, ydim=200):
    """ Create uniform EASTWARD flow on sperical earth and advect particles

    As flow is so simple, it can be directly compared to analytical solution
    Note that in this case the cosine conversion is needed
    """
    maxvel = 1.
    p_fld = 10
    lon = np.linspace(-180, 180, xdim, dtype=np.float32)
    lat = np.linspace(-90, 90, ydim, dtype=np.float32)
    V = np.zeros([xdim, ydim])
    U = maxvel * np.ones([xdim, ydim])
    P = p_fld * np.ones([xdim, ydim])

    grid = Grid.from_data(np.array(U, dtype=np.float32), lon, lat,
                          np.array(V, dtype=np.float32), lon, lat,
                          field_data={'P': np.array(P, dtype=np.float32)})

    lonstart = [0, 45]
    latstart = [0, 45]
    endtime = delta(hours=24)
    pset = grid.ParticleSet(2, pclass=pclass(mode), lon=lonstart, lat=latstart)
    pset.execute(pset.Kernel(AdvectionRK4) + k_sample_p,
                 endtime=endtime, dt=delta(hours=1))

    assert(pset[0].lat - latstart[0] < 1e-4)
    assert(pset[0].lon - (lonstart[0] + endtime.total_seconds() * maxvel / 1852 / 60
                          / cos(latstart[0] * pi / 180)) < 1e-4)
    assert(abs(pset[0].p - p_fld) < 1e-4)
    assert(pset[1].lat - latstart[1] < 1e-4)
    assert(pset[1].lon - (lonstart[1] + endtime.total_seconds() * maxvel / 1852 / 60
                          / cos(latstart[1] * pi / 180)) < 1e-4)
    assert(abs(pset[1].p - p_fld) < 1e-4)
Example #6
def moving_eddies_example(grid, npart=2, mode='jit', verbose=False,
                          method=AdvectionRK4):
    """Configuration of a particle set that follows two moving eddies

    :arg grid: :class Grid: that defines the flow field
    :arg npart: Number of particles to initialise"""

    # Determine particle class according to mode
    pset = grid.ParticleSet(size=npart, pclass=ptype[mode],
                            start=(3.3, 46.), finish=(3.3, 47.8))

    if verbose:
        print("Initial particle positions:\n%s" % pset)

    # Execute for 21 days, with 5min timesteps and hourly output
    endtime = delta(days=21)
    print("MovingEddies: Advecting %d particles for %s" % (npart, str(endtime)))
    pset.execute(method, endtime=endtime, dt=delta(minutes=5),
                 output_file=pset.ParticleFile(name="EddyParticle"),
                 interval=delta(hours=1), show_movie=False)

    if verbose:
        print("Final particle positions:\n%s" % pset)

    return pset
Example #7
def stommel_example(grid, npart=1, mode='jit', verbose=False,
                    method=AdvectionRK4):
    """Configuration of a particle set that follows two moving eddies

    :arg grid: :class NEMOGrid: that defines the flow field
    :arg npart: Number of particles to initialise"""

    # Determine particle class according to mode
    ParticleClass = JITParticle if mode == 'jit' else Particle
    pset = grid.ParticleSet(size=npart, pclass=ParticleClass,
                            start=(10., 50.), finish=(7., 30.))

    if verbose:
        print("Initial particle positions:\n%s" % pset)

    # Execute for 25 days, with 5min timesteps and hourly output
    endtime = delta(days=25)
    dt = delta(minutes=5)
    interval = delta(hours=12)
    print("Stommel: Advecting %d particles for %s" % (npart, endtime))
    pset.execute(method, endtime=endtime, dt=dt, interval=interval,
                 output_file=pset.ParticleFile(name="StommelParticle"))

    if verbose:
        print("Final particle positions:\n%s" % pset)

    return pset
def test_moving_eddies_fwdbwd(mode, npart=2):
    method = AdvectionRK4
    grid = moving_eddies_grid()

    # Determine particle class according to mode
    ParticleClass = JITParticle if mode == 'jit' else Particle

    pset = grid.ParticleSet(size=npart, pclass=ParticleClass,
                            start=(3.3, 46.), finish=(3.3, 47.8))

    # Execute for 1 day, with 5min timesteps and hourly output
    endtime = delta(days=1)
    dt = delta(minutes=5)
    interval = delta(hours=1)
    print("MovingEddies: Advecting %d particles for %s" % (npart, str(endtime)))
    pset.execute(method, starttime=0, endtime=endtime, dt=dt, interval=interval,
                 output_file=pset.ParticleFile(name="EddyParticlefwd"))

    print("Now running in backward time mode")
    pset.execute(method, starttime=endtime, endtime=0, dt=-dt, interval=-interval,
                 output_file=pset.ParticleFile(name="EddyParticlebwd"))

    assert(pset[0].lon > 3.2 and 45.9 < pset[0].lat < 46.1)
    assert(pset[1].lon > 3.2 and 47.7 < pset[1].lat < 47.9)

    return pset
Example #9
 def setUp(self):
     os.mkdir(self._backupHome)
     date = dt.now()
     os.mkdir(join(self._backupHome, date.strftime(self._format)))
     date = date - delta(days=1)
     os.mkdir(join(self._backupHome, date.strftime(self._format)))
     date = date - delta(hours=1)
     os.mkdir(join(self._backupHome, date.strftime(self._format)))
Example #10
 def test_customer_is_granted_access_if_acknowledged_for_2_minutes(self):
     paywall = Paywall()
     customer = Customer('95015843')
     paywall.request_payment(customer, when=dt.now()-delta(minutes=5))
     paywall.acknowledge(customer, when=dt.now()-delta(minutes=3))
     status = paywall.get_status(customer)
     self.assertEqual(status, paywall.status['timeout'])
     self.assertTrue(paywall.has_access(customer))
def decaying_moving_example(grid, mode='scipy', method=AdvectionRK4):
    pset = grid.ParticleSet(size=1, pclass=ptype[mode], lon=start_lon, lat=start_lat)

    endtime = delta(days=2)
    dt = delta(minutes=5)
    interval = delta(hours=1)

    pset.execute(method, endtime=endtime, dt=dt, interval=interval,
                 output_file=pset.ParticleFile(name="DecayingMovingParticle"), show_movie=False)

    return pset
Example #12
def test_stationary_eddy(grid_stationary, mode, method, rtol, npart=1):
    grid = grid_stationary
    lon = np.linspace(12000, 21000, npart, dtype=np.float32)
    lat = np.linspace(12500, 12500, npart, dtype=np.float32)
    pset = grid.ParticleSet(size=npart, pclass=ptype[mode], lon=lon, lat=lat)
    endtime = delta(hours=6).total_seconds()
    pset.execute(kernel[method], dt=delta(minutes=3), endtime=endtime)
    exp_lon = [truth_stationary(x, y, endtime)[0] for x, y in zip(lon, lat)]
    exp_lat = [truth_stationary(x, y, endtime)[1] for x, y in zip(lon, lat)]
    assert np.allclose(np.array([p.lon for p in pset]), exp_lon, rtol=rtol)
    assert np.allclose(np.array([p.lat for p in pset]), exp_lat, rtol=rtol)
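The analytical reference truth_stationary used above follows from integrating the eddy velocities U = u_0*cos(f*t), V = -u_0*sin(f*t) in time. A minimal sketch, assuming u_0 and f are the same module-level constants used to build grid_stationary:

def truth_stationary_sketch(x_0, y_0, t):
    # x(t) = x0 + (u_0/f)*sin(f*t),  y(t) = y0 + (u_0/f)*(cos(f*t) - 1)
    lon = x_0 + u_0 / f * np.sin(f * t)
    lat = y_0 + u_0 / f * (np.cos(f * t) - 1)
    return lon, lat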
Example #13
def pensinsula_example(grid, npart, mode='jit', degree=1,
                       verbose=False, output=True, method=AdvectionRK4):
    """Example configuration of particle flow around an idealised Peninsula

    :arg grid: :class Grid: that defines the flow field
    :arg npart: Number of particles to initialise"""

    # Determine particle class according to mode
    ParticleClass = JITParticle if mode == 'jit' else Particle

    # First, we define a custom Particle class to which we add a
    # custom variable, the initial stream function value p
    class MyParticle(ParticleClass):
        # JIT compilation requires a-priori knowledge of the particle
        # data structure, so we define additional variables here.
        user_vars = {'p': np.float32, 'p_start': np.float32}

        def __init__(self, *args, **kwargs):
            """Custom initialisation function which calls the base
            initialisation and adds the instance variable p"""
            super(MyParticle, self).__init__(*args, **kwargs)
            self.p = 0.
            self.p_start = 0.

        def __repr__(self):
            """Custom print function which overrides the built-in"""
            return "P(%.4f, %.4f)[p=%.5f, p_start=%f]" % (self.lon, self.lat,
                                                          self.p, self.p_start)

    # Initialise particles
    x = 3. * (1. / 1.852 / 60)  # 3 km offset from boundary
    y = (grid.U.lat[0] + x, grid.U.lat[-1] - x)  # latitude range, including offsets
    pset = grid.ParticleSet(npart, pclass=MyParticle, start=(x, y[0]), finish=(x, y[1]))
    for particle in pset:
        particle.p_start = grid.P[0., particle.lon, particle.lat]

    if verbose:
        print("Initial particle positions:\n%s" % pset)

    # Advect the particles for 24h
    time = delta(hours=24)
    dt = delta(minutes=5)
    k_adv = pset.Kernel(method)
    k_p = pset.Kernel(UpdateP)
    out = pset.ParticleFile(name="MyParticle") if output else None
    interval = delta(hours=1) if output else -1
    print("Peninsula: Advecting %d particles for %s" % (npart, str(time)))
    pset.execute(k_adv + k_p, endtime=time, dt=dt, output_file=out, interval=interval)

    if verbose:
        print("Final particle positions:\n%s" % pset)

    return pset
Example #14
def test_advection_zonal(lon, lat, mode, npart=10):
    """ Particles at high latitude move geographically faster due to
        the pole correction in `GeographicPolar`.
    """
    U = np.ones((lon.size, lat.size), dtype=np.float32)
    V = np.zeros((lon.size, lat.size), dtype=np.float32)
    grid = Grid.from_data(U, lon, lat, V, lon, lat, mesh='spherical')

    pset = grid.ParticleSet(npart, pclass=ptype[mode],
                            lon=np.zeros(npart, dtype=np.float32) + 20.,
                            lat=np.linspace(0, 80, npart, dtype=np.float32))
    pset.execute(AdvectionRK4, endtime=delta(hours=2), dt=delta(seconds=30))
    assert (np.diff(np.array([p.lon for p in pset])) > 1.e-4).all()
Example #15
def test_ofam_particles(mode):
    grid = set_ofam_grid()

    lonstart = [180]
    latstart = [10]

    pset = grid.ParticleSet(len(lonstart), pclass=ptype[mode], lon=lonstart, lat=latstart)

    pset.execute(AdvectionRK4, runtime=delta(days=10), dt=delta(minutes=5),
                 interval=delta(hours=6))

    assert(abs(pset[0].lon - 173) < 1)
    assert(abs(pset[0].lat - 11) < 1)
def test_globcurrent_particles(mode):
    grid = set_globcurrent_grid()

    lonstart = [25]
    latstart = [-35]

    ParticleClass = JITParticle if mode == 'jit' else Particle
    pset = grid.ParticleSet(len(lonstart), pclass=ParticleClass, lon=lonstart, lat=latstart)

    pset.execute(AdvectionRK4, runtime=delta(days=1), dt=delta(minutes=5),
                 interval=delta(hours=1))

    assert(abs(pset[0].lon - 23.8) < 1)
    assert(abs(pset[0].lat - -35.3) < 1)
Example #17
def rotation_example(grid, mode='jit', method=AdvectionRK4):

    npart = 2          # Test two particles on the rotating grid.
    pset = grid.ParticleSet(size=npart, pclass=ptype[mode],
                            start=(30., 30.),
                            finish=(30., 50.))  # One particle in centre, one on periphery of grid.

    endtime = delta(hours=17)
    dt = delta(minutes=5)
    interval = delta(hours=1)

    pset.execute(method, endtime=endtime, dt=dt, interval=interval,
                 output_file=pset.ParticleFile(name="RadialParticle"), show_movie=False)

    return pset
Example #18
 def test_customer_is_not_granted_access_if_requested_for_20_minutes_ago(self):
     paywall = Paywall()
     customer = Customer('95015843')
     paywall.request_payment(customer, when=dt.now()-delta(minutes=20))
     status = paywall.get_status(customer)
     self.assertEqual(status, None)
     self.assertFalse(paywall.has_access(customer))
Example #19
def radial_rotation_grid(xdim=200, ydim=200):  # Define 2D flat, square grid for testing purposes.

    lon = np.linspace(0, 60, xdim, dtype=np.float32)
    lat = np.linspace(0, 60, ydim, dtype=np.float32)

    x0 = 30.                                   # Define the origin to be the centre of the grid.
    y0 = 30.

    U = np.zeros((xdim, ydim), dtype=np.float32)
    V = np.zeros((xdim, ydim), dtype=np.float32)

    T = delta(days=1)
    omega = 2*np.pi/T.total_seconds()          # Define the rotational period as 1 day.

    for i in range(lon.size):
        for j in range(lat.size):

            r = np.sqrt((lon[i]-x0)**2 + (lat[j]-y0)**2)  # Define radial displacement.
            assert(r >= 0.)
            assert(r <= np.sqrt(x0**2 + y0**2))

            theta = math.atan2((lat[j]-y0), (lon[i]-x0))  # Define the polar angle.
            assert(abs(theta) <= np.pi)

            U[i, j] = r * math.sin(theta) * omega
            V[i, j] = -r * math.cos(theta) * omega

    return Grid.from_data(U, lon, lat, V, lon, lat, mesh='flat')
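The same solid-body rotation field can be built without the double Python loop. A vectorized sketch under the same assumptions (numpy as np, Grid and delta imported as above):

def radial_rotation_grid_vectorized(xdim=200, ydim=200):
    lon = np.linspace(0, 60, xdim, dtype=np.float32)
    lat = np.linspace(0, 60, ydim, dtype=np.float32)
    x0 = y0 = 30.
    omega = 2 * np.pi / delta(days=1).total_seconds()
    # indexing='ij' reproduces the U[i, j] = f(lon[i], lat[j]) layout used above
    LON, LAT = np.meshgrid(lon, lat, indexing='ij')
    r = np.sqrt((LON - x0) ** 2 + (LAT - y0) ** 2)
    theta = np.arctan2(LAT - y0, LON - x0)
    U = (r * np.sin(theta) * omega).astype(np.float32)
    V = (-r * np.cos(theta) * omega).astype(np.float32)
    return Grid.from_data(U, lon, lat, V, lon, lat, mesh='flat')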
Example #20
 def __setend(self, window):
     """
     Set the proper window ending.
     Performs:
       - Convert to string if L{dt} object.
       - Default begin plus 1 hour when value is (None).
     @param window: The window specification.
     @type window: dict
     @return: The updated I{window}.
     @rtype: dict
     """
     END = 'end'
     if END in window:
         v = window[END]
         if not v:
             v = dt.utcnow()+delta(hours=1)
         if isinstance(v, dt):
             v = v.strftime(self.FORMAT)
         window[END] = v
     else:
         if not self.__hasduration(window):
             raise Exception,\
                 'Window() must have "end" or one of: %s' % \
                 str(self.DURATION)
     return window
Example #21
def test_zonalflow_sperical(mode, xdim=100, ydim=200):
    """ Create uniform EASTWARD flow on sperical earth and advect particles

    As flow is so simple, it can be directly compared to analytical solution
    Note that in this case the cosine conversion is needed
    """

    ParticleClass = JITParticle if mode == 'jit' else Particle

    class MyParticle(ParticleClass):
        user_vars = {'p': np.float32}

        def __init__(self, *args, **kwargs):
            super(MyParticle, self).__init__(*args, **kwargs)
            self.p = 1.

        def __repr__(self):
            return "P(%.4f, %.4f)[p=%.5f]" % (self.lon, self.lat, self.p)

    maxvel = 1.
    p_fld = 10
    lon = np.linspace(-180, 180, xdim, dtype=np.float32)
    lat = np.linspace(-90, 90, ydim, dtype=np.float32)
    V = np.zeros([xdim, ydim])
    U = maxvel * np.ones([xdim, ydim])
    P = p_fld * np.ones([xdim, ydim])

    grid = Grid.from_data(np.array(U, dtype=np.float32), lon, lat,
                          np.array(V, dtype=np.float32), lon, lat,
                          field_data={'P': np.array(P, dtype=np.float32)})

    lonstart = [0, 45]
    latstart = [0, 45]
    endtime = delta(hours=24)
    pset = grid.ParticleSet(2, pclass=MyParticle, lon=lonstart, lat=latstart)
    pset.execute(pset.Kernel(AdvectionRK4) + pset.Kernel(UpdateP),
                 endtime=endtime, dt=delta(hours=1))

    assert(pset[0].lat - latstart[0] < 1e-4)
    assert(pset[0].lon - (lonstart[0] + endtime.total_seconds() * maxvel / 1852 / 60
                          / cos(latstart[0] * pi / 180)) < 1e-4)
    assert(abs(pset[0].p - p_fld) < 1e-4)
    assert(pset[1].lat - latstart[1] < 1e-4)
    assert(pset[1].lon - (lonstart[1] + endtime.total_seconds() * maxvel / 1852 / 60
                          / cos(latstart[1] * pi / 180)) < 1e-4)
    assert(abs(pset[1].p - p_fld) < 1e-4)
Example #22
def grid_stationary(xdim=100, ydim=100, maxtime=delta(hours=6)):
    """Generate a grid encapsulating the flow field of a stationary eddy.

    Reference: N. Fabbroni, 2009, "Numerical simulations of passive
    tracers dispersion in the sea"
    """
    lon = np.linspace(0, 25000, xdim, dtype=np.float32)
    lat = np.linspace(0, 25000, ydim, dtype=np.float32)
    time = np.arange(0., maxtime.total_seconds(), 60., dtype=np.float64)
    U = np.ones((xdim, ydim, 1), dtype=np.float32) * u_0 * np.cos(f * time)
    V = np.ones((xdim, ydim, 1), dtype=np.float32) * -u_0 * np.sin(f * time)
    return Grid.from_data(U, lon, lat, V, lon, lat, time=time, mesh='flat')
Example #23
def test_delay_start_example(mode, npart=10, show_movie=False):
    """Example script that shows how to 'delay' the start of particle advection.
    This is useful for example when particles need to be started at different times

    In this example, we use pset.add statements to add one particle every hour
    in the peninsula grid. Note that the title in the movie may not show correct time"""

    grid = Grid.from_nemo('examples/Peninsula_data/peninsula', extra_vars={'P': 'P'})

    # Initialise particles as in the Peninsula example
    x = 3. * (1. / 1.852 / 60)  # 3 km offset from boundary
    y = (grid.U.lat[0] + x, grid.U.lat[-1] - x)  # latitude range, including offsets

    lat = np.linspace(y[0], y[1], npart, dtype=np.float32)
    pset = grid.ParticleSet(0, lon=[], lat=[], pclass=ptype[mode])

    delaytime = delta(hours=1)  # delay time between particle releases
    for t in range(npart):
        pset.add(ptype[mode](lon=x, lat=lat[t], grid=grid))
        pset.execute(AdvectionRK4, runtime=delaytime, dt=delta(minutes=5),
                     interval=delta(hours=1), show_movie=show_movie)

    # Note that time on the movie is not parsed correctly
    pset.execute(AdvectionRK4, runtime=delta(hours=24)-npart*delaytime,
                 dt=delta(minutes=5), interval=delta(hours=1), show_movie=show_movie)

    londist = np.array([(p.lon - x) for p in pset])
    assert(londist > 0.1).all()
Example #24
  def get(self, url, now=None):
    #ym=2009.3
    #vmode=itiran
    if now is None:
      now = dt.now()
    tomorrow = now + delta(1)

    option = urllib.urlencode(dict(
          ym='%i.%i'%(now.year, now.month),
          vmode='itiran'
          ))
    url += '?' + option
    self.write('getting feed from "%s".\n'%(url))

    p = Parser()
    f = urllib.urlopen(url)
    try:
      p.feed(f.read().decode('Shift-JIS'))
      p.goahead(0)
    finally:
      f.close()

    pages = []
    for a in p.findAll('a', attrs=dict(href="javaScript:void(0)")):
      m = parseOnClick.search(a['onclick'])
      if m:
        d = m.groupdict()
        #print d['year'], d['month'], d['day'], d['id']
        memo = '''http://www.backgammon.gr.jp/EventSchedule/calendar/calendar.cgi'''
        if int(d['day']) == tomorrow.day:
          option = urllib.urlencode(dict(
              action='memo',
              yy=d['year'],
              mm=d['month'],
              dd=d['day'],
              id=d['id'],
              ))
          f = urllib.urlopen(memo + '?' + option)
          try:
            uhtml = f.read().decode('Shift-JIS')
          finally:
            f.close()
          pages.append(Item(self, uhtml))
    return pages
Example #25
def fieldset_stationary(xdim=100, ydim=100, maxtime=delta(hours=6)):
    """Generate a FieldSet encapsulating the flow field of a stationary eddy.

    Reference: N. Fabbroni, 2009, "Numerical simulations of passive
    tracers dispersion in the sea"
    """
    time = np.arange(0., maxtime.total_seconds(), 60., dtype=np.float64)
    dimensions = {
        'lon': np.linspace(0, 25000, xdim, dtype=np.float32),
        'lat': np.linspace(0, 25000, ydim, dtype=np.float32),
        'time': time
    }
    data = {
        'U': np.ones(
            (xdim, ydim, 1), dtype=np.float32) * u_0 * np.cos(f * time),
        'V': np.ones(
            (xdim, ydim, 1), dtype=np.float32) * -u_0 * np.sin(f * time)
    }
    return FieldSet.from_data(data, dimensions, mesh='flat')
Example #26
def test_ofam_xarray_vs_netcdf(dt):
    fieldsetNetcdf = set_ofam_fieldset(use_xarray=False)
    fieldsetxarray = set_ofam_fieldset(use_xarray=True)
    lonstart, latstart, runtime = (180, 10, delta(days=7))

    psetN = ParticleSet(fieldsetNetcdf,
                        pclass=JITParticle,
                        lon=lonstart,
                        lat=latstart)
    psetN.execute(AdvectionRK4, runtime=runtime, dt=dt)

    psetX = ParticleSet(fieldsetxarray,
                        pclass=JITParticle,
                        lon=lonstart,
                        lat=latstart)
    psetX.execute(AdvectionRK4, runtime=runtime, dt=dt)

    assert np.allclose(psetN[0].lon, psetX[0].lon)
    assert np.allclose(psetN[0].lat, psetX[0].lat)
Example #27
    def run(self):
        now = datetime.datetime.now()
        mid = datetime.datetime(now.year, now.month,
                                now.day) + datetime.timedelta(1)
        ma5 = get_yesterday_ma5(self.ticker)
        target_price = get_target_price(self.ticker)
        wait_flag = False

        while self.alive:
            try:
                now = datetime.datetime.now()
                if mid < now < mid + datetime.timedelta(seconds=10):
                    target_price = get_target_price(self.ticker)
                    mid = datetime.datetime(now.year, now.month,
                                            now.day) + datetime.timedelta(1)
                    ma5 = get_yesterday_ma5(self.ticker)
                    desc = sell_crypto_currency(self.bithumb, self.ticker)

                    result = self.bithumb.get_order_completed(desc)
                    timestamp = result['data']['order_date']
                    dt = datetime.datetime.fromtimestamp(
                        int(int(timestamp) / 1000000))
                    tstring = dt.strftime("%Y/%m/%d %H:%M:%S")
                    self.tradingSent.emit(tstring, "매도",
                                          result['data']['order_qty'])
                    wait_flag = False

                if wait_flag == False:
                    current_price = pybithumb.get_current_price(self.ticker)
                    if (current_price > target_price) and (current_price >
                                                           ma5):
                        desc = buy_crypto_currency(self.bithumb, self.ticker)
                        result = self.bithumb.get_order_completed(desc)
                        timestamp = result['data']['order_date']
                        dt = datetime.datetime.fromtimestamp(
                            int(int(timestamp) / 1000000))
                        tstring = dt.strftime("%Y/%m/%d %H:%M:%S")
                        self.tradingSent.emit(tstring, "매수",
                                              result['data']['order_qty'])
                        wait_flag = True
            except:
                pass
            time.sleep(1)
Example #28
def grid_stationary(xdim=100, ydim=100, maxtime=delta(hours=6)):
    """Generate a grid encapsulating the flow field of a stationary eddy.

    Reference: N. Fabbroni, 2009, "Numerical simulations of passive
    tracers dispersion in the sea"
    """
    lon = np.linspace(0, 25000, xdim, dtype=np.float32)
    lat = np.linspace(0, 25000, ydim, dtype=np.float32)
    time = np.arange(0., maxtime.total_seconds(), 60., dtype=np.float64)
    U = np.ones((xdim, ydim, 1), dtype=np.float32) * u_0 * np.cos(f * time)
    V = np.ones((xdim, ydim, 1), dtype=np.float32) * -u_0 * np.sin(f * time)
    return Grid.from_data(np.asarray(U, np.float32),
                          lon,
                          lat,
                          np.asarray(V, np.float32),
                          lon,
                          lat,
                          time=time,
                          mesh='flat')
Example #29
def run_northsea_mp(outfile,
                    nemo_res='0083',
                    cmems=False,
                    stokes=False,
                    diffusion=0,
                    run3D=False):
    fieldset = get_nemo_fieldset(nemo_res, run3D)
    if cmems:
        set_cmems(fieldset)
    if stokes:
        set_stokes(fieldset)
    if diffusion > 0:
        set_diffusion(fieldset, diffusion)

    set_unbeaching(fieldset)
    pset = get_particle_set(fieldset, run3D)

    kernel = pset.Kernel(AdvectionRK4_3D) if run3D else pset.Kernel(
        AdvectionRK4)
    BeachTesting = BeachTesting_3D if run3D else BeachTesting_2D
    kernel += pset.Kernel(BeachTesting) + pset.Kernel(UnBeaching)
    if stokes:
        kernel += pset.Kernel(StokesDrag) + pset.Kernel(BeachTesting)
    if diffusion > 0:
        kernel += pset.Kernel(BrownianMotion2D) + pset.Kernel(BeachTesting)
    kernel += pset.Kernel(Ageing)

    pfile = ParticleFile(outfile, pset)
    pfile.write(pset, pset[0].time)

    tic = timelib.time()
    ndays = 365 * 4 + 100
    for d in range(ndays // 2):
        day = 2 * d
        print('running %d / %d [time %g s]: %d particles ' %
              (day, ndays, timelib.time() - tic, len(pset)))
        pset.execute(kernel,
                     runtime=delta(days=2),
                     dt=900,
                     verbose_progress=False,
                     recovery={ErrorCode.ErrorOutOfBounds: DeleteParticle})
        pfile.write(pset, pset[0].time)
Example #30
def test_globcurrent_netcdf_timestamps(dt):
    fieldsetNetcdf = set_globcurrent_fieldset()
    timestamps = fieldsetNetcdf.U.grid.timeslices
    fieldsetTimestamps = set_globcurrent_fieldset(timestamps=timestamps)
    lonstart, latstart, runtime = (25, -35, delta(days=7))

    psetN = ParticleSet(fieldsetNetcdf,
                        pclass=JITParticle,
                        lon=lonstart,
                        lat=latstart)
    psetN.execute(AdvectionRK4, runtime=runtime, dt=dt)

    psetT = ParticleSet(fieldsetTimestamps,
                        pclass=JITParticle,
                        lon=lonstart,
                        lat=latstart)
    psetT.execute(AdvectionRK4, runtime=runtime, dt=dt)

    assert np.allclose(psetN.lon[0], psetT.lon[0])
    assert np.allclose(psetN.lat[0], psetT.lat[0])
Example #31
def parse_input(s):
    """Parse the given input and intelligently transform it into an absolute,
    non-naive, timezone-aware datetime object for the UTC timezone.

    The input can be specified as a millisecond-precision UTC timestamp (or
    delta against Epoch), with or without a terminating 'L'. Alternatively, the
    input can be specified as a human-readable delta string with unit-separated
    segments, like '24d6h4m500' (24 days, 6 hours, 4 minutes and 500ms), as
    long as the segments are in descending unit span order."""
    if isinstance(s, six.integer_types):
        s = str(s)
    elif not isinstance(s, six.string_types):
        raise ValueError(s)

    original = s

    if s[-1:] == 'L':
        s = s[:-1]

    sign = {'-': -1, '=': 0, '+': 1}.get(s[0], None)
    if sign is not None:
        s = s[1:]

    ts = 0
    for unit in _SORTED_UNITS:
        pos = s.find(unit[0])
        if pos == 0:
            raise ValueError(original)
        elif pos > 0:
            # If we find a unit letter, we're dealing with an offset. Default
            # to positive offset if a sign wasn't specified.
            if sign is None:
                sign = 1
            ts += int(s[:pos]) * __timedelta_millis(unit[1])
            s = s[min(len(s), pos + 1):]

    if s:
        ts += int(s)

    return date_from_utc_ts(ts) if not sign else \
        utc() + sign * delta(milliseconds=ts)
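A hedged usage sketch of the parser above (assumes parse_input is importable and that utc() and date_from_utc_ts behave as referenced in the function body):

# An absolute millisecond timestamp (trailing 'L' optional) yields a fixed UTC datetime;
# a signed unit string yields an offset from the current UTC time.
print(parse_input('1500000000000L'))   # absolute: 2017-07-14 02:40:00 UTC
print(parse_input('+24d6h4m500'))      # now + 24 days, 6 hours, 4 minutes, 500 ms
print(parse_input('-30m'))             # now - 30 minutes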
Example #32
def parse_input(s):
    """Parse the given input and intelligently transform it into an absolute,
    non-naive, timezone-aware datetime object for the UTC timezone.

    The input can be specified as a millisecond-precision UTC timestamp (or
    delta against Epoch), with or without a terminating 'L'. Alternatively, the
    input can be specified as a human-readable delta string with unit-separated
    segments, like '24d6h4m500' (24 days, 6 hours, 4 minutes and 500ms), as
    long as the segments are in descending unit span order."""
    if isinstance(s, six.integer_types):
        s = str(s)
    elif not isinstance(s, six.string_types):
        raise ValueError(s)

    original = s

    if s[-1:] == 'L':
        s = s[:-1]

    sign = {'-': -1, '=': 0, '+': 1}.get(s[0], None)
    if sign is not None:
        s = s[1:]

    ts = 0
    for unit in _SORTED_UNITS:
        pos = s.find(unit[0])
        if pos == 0:
            raise ValueError(original)
        elif pos > 0:
            # If we find a unit letter, we're dealing with an offset. Default
            # to positive offset if a sign wasn't specified.
            if sign is None:
                sign = 1
            ts += int(s[:pos]) * __timedelta_millis(unit[1])
            s = s[min(len(s), pos + 1):]

    if s:
        ts += int(s)

    return date_from_utc_ts(ts) if not sign else \
        utc() + sign * delta(milliseconds=ts)
    def test_master_search_two_services(self, _calculate_eta):
        master = Master.objects.get(first_name='VASYA')
        schedule = master.get_schedule(utils.get_date(1))
        schedule.delete()
        schedule = Schedule.objects.create(master=master, date=timezone.now()
                                                               + delta(days=1))
        schedule.save()

        TimeSlot.objects.create(time=Time.objects.create(hour=10, minute=30),
                                taken=False, schedule=schedule)
        TimeSlot.objects.create(time=Time.objects.create(hour=11, minute=00),
                                taken=False, schedule=schedule)
        TimeSlot.objects.create(time=Time.objects.create(hour=11, minute=30),
                                taken=False, schedule=schedule)
        TimeSlot.objects.create(time=Time.objects.create(hour=12, minute=00),
                                taken=False, schedule=schedule)
        TimeSlot.objects.create(time=Time.objects.create(hour=12, minute=30),
                                taken=False, schedule=schedule)
        TimeSlot.objects.create(time=Time.objects.create(hour=13, minute=00),
                                taken=False, schedule=schedule)

        # assume all slots are reachable
        _calculate_eta.return_value = 10

        url = reverse(MasterSearchView.view_name)
        service_ids = ','.join(
            [str(service.id) for service in master.services.all()[0:2]])
        resp = self.client.get(
            f"{url}?services={service_ids}&coordinates=10,20")
        self.assertEqual(resp.status_code, status.HTTP_200_OK)

        # no favorites in this test
        favorites = resp.data['favorites']
        self.assertEqual(len(favorites), 0)
        others = resp.data['others']
        # both do at least one service in the following week
        self.assertEqual(len(others), 1)

        # one day with 5 slots
        day_one_slots = others[0]['available_slots']
        self.assertEqual(len(day_one_slots), 1)
Example #34
 def __dates(self, now=None):
     """
     Convert to datetime objects.
     @param now: The current UTC time.
     @type now: datetime
     @return: (begin, end)
     @rtype: (datetime, datetime)
     """
     DURATION = ('days', 'seconds', 'minutes', 'hours', 'weeks')
     if self.begin:
         begin = dt.strptime(self.begin, self.FORMAT)
     else:
         begin = (now or dt.utcnow())
     if self.end:
         end = dt.strptime(self.end, self.FORMAT)
     else:
         end = begin
     for k,v in self.__dict__.items():
         if k in DURATION:
             end = end+delta(**{k:v})
     return (begin, end)
Example #35
    def test_cancel_order_too_late(self):
        master = Master.objects.get(first_name='VASYA')

        service = master.services.all()[0]
        # manually creating an order

        target_date = timezone.now() + delta(days=1)
        order_1, _ = make_order(client=self.client_object,
                                master=master,
                                service=service,
                                order_date=target_date,
                                order_time=datetime.time(hour=11, minute=00))

        frozen = freeze_time(target_date.replace(hour=9, minute=0))
        frozen.start()
        resp = self.client.delete(
            reverse(OrderCancelView.view_name, args=[order_1.id]))
        frozen.stop()

        # too late
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
Example #36
    def __init__(self, d_upper: dt, minesite: str = 'FortHills'):
        super().__init__()
        a, b = self.a, self.b

        # make full year range
        d_lower = d_upper + relativedelta(years=-1) + delta(days=1)

        _year_month = cfn('FORMAT', ['date', 'format'])  # year_month(a.DateAdded, 'yyyy-MM')
        year_month = _year_month(a.DateTSISubmission, 'yyyy-MM')
        cols = [year_month.as_('period'), fn.Count(pk.terms.Star()).as_('num')]

        q = Query.from_(a) \
            .select(*cols) \
            .left_join(b).on_field('Unit') \
            .where(a.StatusTSI == 'Closed') \
            .where(b.MineSite == minesite) \
            .where(~a.Title.like('fc %')) \
            .where(a.DateTSISubmission.between(d_lower, d_upper)) \
            .groupby(year_month)

        f.set_self(vars())
Example #37
    def fulltime(self, time):
        """Method to convert a time difference in seconds to a date, based on the time_origin

        :param: time: input time
        :return: self.time_origin + time
        """
        time = time.time_origin if isinstance(time, TimeConverter) else time
        if self.calendar == 'np_datetime64':
            if isinstance(time, (list, np.ndarray)):
                return [
                    self.time_origin + np.timedelta64(int(t), 's')
                    for t in time
                ]
            else:
                return self.time_origin + np.timedelta64(int(time), 's')
        elif self.calendar in _get_cftime_calendars():
            return self.time_origin + delta(seconds=time)
        elif self.calendar is None:
            return self.time_origin + time
        else:
            raise RuntimeError('Calendar %s not implemented in TimeConverter' %
                               (self.calendar))
Example #38
    def get_wo_from_email(self, unit: str, title: str) -> Union[str, None]:
        """Get WO number from outlook

        Parameters
        ----------
        unit : str
            event unit
        title : str
            event title

        Returns
        -------
        Union[str, None]
            WO number if found

        Raises
        ------
        WONotFoundError
            if no wo found for unit/title
        """
        expr_sub = re.compile(f'{unit}.*{title}', re.IGNORECASE)
        expr_wo = re.compile('WO[0-9]{7}', re.IGNORECASE)

        # Filter messages to received in last 15 days
        d = (dt.now() + delta(days=-15)).strftime('%m/%d/%Y %H:%M %p')
        messages = self.wo_folder.Items.Restrict(f'[ReceivedTime] >= "{d}"')

        # items not sorted by date
        for item in messages:

            # find email by Unit - Title
            if re.search(expr_sub, item.Subject):

                match = re.search(expr_wo, item.Body)

                if match is not None:
                    wo = match.group(0)
                    return wo
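A minimal standalone sketch of the WO-number matching used above (only the re module is needed; the email body is made up for illustration):

import re

expr_wo = re.compile('WO[0-9]{7}', re.IGNORECASE)
body = 'Unit F301 - coolant leak investigated, see wo1234567 for details.'
match = re.search(expr_wo, body)
print(match.group(0) if match else None)  # -> 'wo1234567'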
Example #39
def radialrotation_fieldset(xdim, ydim):
    # Coordinates of the test fieldset (on A-grid in deg)
    a = b = 20000  # domain size
    lon = np.linspace(-a / 2, a / 2, xdim, dtype=np.float32)
    lat = np.linspace(-b / 2, b / 2, ydim, dtype=np.float32)

    # Define arrays U (zonal), V (meridional) on A-grid
    U = np.zeros((lon.size, lat.size), dtype=np.float32)
    V = np.zeros((lon.size, lat.size), dtype=np.float32)
    R = np.zeros((lon.size, lat.size), dtype=np.float32)

    omega = 2 * math.pi / delta(days=1).total_seconds()
    for i in range(lon.size):
        for j in range(lat.size):
            r = np.sqrt(lon[i]**2 + lat[j]**2)
            phi = np.arctan2(lat[j], lon[i])
            U[j, i] = -omega * r * math.sin(phi)
            V[j, i] = omega * r * math.cos(phi)
            R[j, i] = r

    data = {'U': U, 'V': V, 'P': R}
    dimensions = {'lon': lon, 'lat': lat}
    return FieldSet.from_data(data, dimensions, mesh='flat')
Example #40
    def run(self):
        # current time
        now = datetime.datetime.now()
        # 9 a.m. the next day
        mid = datetime.datetime(now.year, now.month, now.day) + datetime.timedelta(hours=9)
        ma5 = get_yesterday_ma5(self.ticker)
        target_price = get_target_price(self.ticker)
        wait_flag = False

        while self.alive:
            try:
                now = datetime.datetime.now()
                # At the next 9 a.m. mark, refresh the target price, the next 9 a.m. mark and the moving average, then market-sell all held coins
                if mid < now < mid + datetime.timedelta(seconds=10):
                    target_price = get_target_price(self.ticker)
                    mid = datetime.datetime(now.year, now.month, now.day) + datetime.timedelta(hours=9)
                    ma5 = get_yesterday_ma5(self.ticker)
                    desc  = sell_crypto_currency(self.upbit, self.ticker)
                    result = self.upbit.get_order(desc['uuid'])
                    timestamp = result['created_at']
                    self.tradingSent.emit(timestamp, "매도", result['volume'])
                    wait_flag = False

                if wait_flag == False:
                    current_price = pyupbit.get_current_price(self.ticker)

                    # if the current price is above the target price and above the 5-day moving average
                    if (current_price > target_price) and (current_price > ma5):
                        # market buy
                        desc = buy_crypto_currency(self.upbit, self.ticker)
                        result = self.upbit.get_order(desc['uuid'])
                        timestamp = result['created_at']
                        self.tradingSent.emit(timestamp, "매수", result['volume'])
                        wait_flag = True
            except: pass

            time.sleep(1)
Example #41
    def test_filtering_date_favorites(self):
        # manually creating an order with vasya
        vasya = Master.objects.get(first_name='VASYA')
        make_order(client=self.client_object,
                   master=vasya,
                   service=vasya.services.all()[0],
                   order_date=timezone.now() + delta(days=1),
                   order_time=datetime.time(hour=10, minute=30))

        url = f"{reverse(MasterListCreateView.view_name)}?" \
              f"date_range={utils.get_date(1)},{utils.get_date(8)}&" \
              f"coordinates=10.03,12.43"
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        # VASYA is favorite
        favorites = resp.data['favorites']
        self.assertEqual(len(favorites), 1)
        self.assertEqual(favorites[0]['first_name'], 'VASYA')

        others = resp.data['others']
        # PETYA is not
        # both do at least one service in the following week
        self.assertEqual(len(others), 1)
        self.assertEqual(others[0]['first_name'], 'PETYA')
Example #42
def swimming():
    date1 = datetime.strptime('2021-07-05', '%Y-%m-%d')
    date2 = datetime.strptime('2021-07-06', '%Y-%m-%d')
    date3 = datetime.strptime('2021-07-08', '%Y-%m-%d')
    date4 = datetime.strptime('2021-07-09', '%Y-%m-%d')
    weeks = 14
    dates = []
    week_delta = delta(days=7)
    for i in range(weeks):
#        dates.append(date1)
#        dates.append(date2)
#        dates.append(date3)
#        dates.append(date4)
#        date1 = date1 + week_delta
#        date2 = date2 + week_delta
#        date3 = date3 + week_delta
#        date4 = date4 + week_delta
        dates.append(date1)
        dates.append(date1)
#        dates.append(date1)
        date1 = date1 + week_delta
    units = ['Heart Rate (BPM)','Distance (Laps)','Lap Time (s)']
    unit_steps = [range(100,180,4),[v for v in range(8,30)],range(30,112,4)]
    return render_template('graph.html',dates=dates,title='Swimming',units=units,unit_steps=unit_steps)
Example #43
def z_score(candle, periods):
    """Generate Mean/STD/Z-Score for given candle properties.
    Attempts to correct distorted Mean values in bearish/bullish markets by
    adjusting length of historic period. Perf: ~20ms
    Returns: pd.DataFrame w/ [5 x 4] dimensions
    """
    df = app.bot.dfc.loc[candle['pair'], strtofreq[candle['freq']]]
    co, cf = candle['open_time'], candle['freq']

    if cf == '1m':
        end = co - delta(minutes=1)
        start = end - delta(minutes=periods)
    elif cf == '5m':
        end = co - delta(minutes=5)
        start = end - delta(minutes=5 * periods)
    elif cf == '1h':
        end = co - delta(hours=1)
        start = end - delta(hours=periods)

    history = df.loc[slice(start, end)]

    # Smooth signal/noise ratio with EMA.
    ema = history.ewm(span=periods).mean()

    # Mean and SD
    stats = ema.describe()
    cols = ['close', 'volume', 'buy_ratio']

    # Calc Z-Scores
    data = [
        (candle['close'] - stats['close']['mean']) / stats['close']['std'],
        (candle['volume'] - stats['volume']['mean']) / stats['volume']['std'],
        (candle['buy_ratio'] - stats['buy_ratio']['mean']) /
        stats['buy_ratio']['std']
    ]

    return pd.Series(data, index=cols).astype('float64').round(8)
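The z-score logic above reduces to z = (x - mean) / std computed over an EMA-smoothed history window. A small self-contained sketch with made-up numbers (pandas only):

import pandas as pd

history = pd.Series([10.0, 11.0, 12.0, 11.5, 10.5])    # made-up closing prices
stats = history.ewm(span=len(history)).mean().describe()
latest = 13.0
z = (latest - stats['mean']) / stats['std']
print(round(z, 4))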
Example #44
lonsz = np.load('release_loc/coor/lons_id%d_dd%d.npy' % (posidx, int(dd)))

latsz = latsz
lonsz = lonsz

print('here0 : ', latsz.shape, lonsz.shape)
print('latsz: ', np.min(latsz), np.max(latsz))
print('lonsz: ', np.min(lonsz), np.max(lonsz))

if (not lonsz.size):
    sys.exit("Only land in the run with this idx")

dep = dd * np.ones(latsz.shape)

times = np.array([
    datetime(1975, 1, 2) - delta(days=(x + 0.5)) for x in range(0, 365 * 5, 3)
])
time = np.empty(shape=(0))
lons = np.empty(shape=(0))
lats = np.empty(shape=(0))
for i in range(len(times)):
    lons = np.append(lons, lonsz)
    lats = np.append(lats, latsz)
    time = np.append(time, np.full(len(lonsz), times[i]))


#%%
def set_fieldset(snapshots, hormesh, sfile):
    ufiles = [
        dirread_POP + 'tavg/' + 't.t0.1_42l_nccs01.0' + s + '.nc'
        for s in snapshots
Example #45
def create_question(question_text, days):
  time = timezone.now() + datetime.timedelta(days=days)
  return Question.objects.create(question_text=question_text, pub_date=time)
Example #46
 def was_published_recently(self):
     #Last 2 Days Of Communities --> Recent
     return self.community_creation_date >= timezone.now()-datetime.timedelta(days=2)
Example #47
def make_everything():
    # making an auth token
    vasya = make_master("VASYA", 11.0, about='a terrible master')
    petya = make_master("PETYA", 12.0)

    hands = make_category("Маникюр")
    feet = make_category("Педикюр")

    for service in hands.services.all():
        vasya.services.add(service)
    vasya.save()

    for service in feet.services.all():
        petya.services.add(service)
    petya.save()

    # VASYA works on 0,+1, does manicure, got three slots
    schedule = Schedule.objects.create(master=vasya,
                                       date=timezone.now() + delta(days=1))
    schedule.save()

    TimeSlot.objects.create(time=Time.objects.create(hour=10, minute=30),
                            taken=True,
                            schedule=schedule)
    TimeSlot.objects.create(time=Time.objects.create(hour=11, minute=00),
                            taken=False,
                            schedule=schedule)
    TimeSlot.objects.create(time=Time.objects.create(hour=11, minute=30),
                            taken=False,
                            schedule=schedule)
    TimeSlot.objects.create(time=Time.objects.create(hour=12, minute=00),
                            taken=False,
                            schedule=schedule)

    schedule = Schedule.objects.create(master=vasya,
                                       date=timezone.now() + delta(days=2))
    schedule.save()

    TimeSlot.objects.create(time=Time.objects.create(hour=12, minute=30),
                            taken=False,
                            schedule=schedule)
    TimeSlot.objects.create(time=Time.objects.create(hour=13, minute=00),
                            taken=False,
                            schedule=schedule)
    TimeSlot.objects.create(time=Time.objects.create(hour=13, minute=30),
                            taken=False,
                            schedule=schedule)

    # PETYA works on +2th, +3th does pedicure, got all slots on +2, none on +3
    schedule = Schedule.objects.create(master=petya,
                                       date=timezone.now() + delta(days=3))
    schedule.save()

    TimeSlot.objects.create(time=Time.objects.create(hour=10, minute=30),
                            taken=False,
                            schedule=schedule)
    TimeSlot.objects.create(time=Time.objects.create(hour=11, minute=00),
                            taken=False,
                            schedule=schedule)
    TimeSlot.objects.create(time=Time.objects.create(hour=11, minute=30),
                            taken=False,
                            schedule=schedule)
    TimeSlot.objects.create(time=Time.objects.create(hour=12, minute=30),
                            taken=True,
                            schedule=schedule)

    schedule = Schedule.objects.create(master=petya,
                                       date=timezone.now() + delta(days=4))
    schedule.save()

    TimeSlot.objects.create(time=Time.objects.create(hour=16, minute=30),
                            taken=False,
                            schedule=schedule)
Example #48
                   help='Numerical method used for advection')
    args = p.parse_args()
    filename = 'analytical_eddies'

    # Generate grid files according to chosen test setup
    if args.grid == 'stationary':
        grid = grid_stationary()
    elif args.grid == 'moving':
        grid = grid_moving()
    elif args.grid == 'decaying':
        grid = grid_decaying()

    npart = args.particles
    pset = grid.ParticleSet(size=npart,
                            pclass=ptype[args.mode],
                            lon=np.linspace(4000,
                                            21000,
                                            npart,
                                            dtype=np.float32),
                            lat=np.linspace(12500,
                                            12500,
                                            npart,
                                            dtype=np.float32))
    if args.verbose:
        print("Initial particle positions:\n%s" % pset)
    pset.execute(kernel[args.method],
                 dt=delta(minutes=3),
                 endtime=delta(hours=6))
    if args.verbose:
        print("Final particle positions:\n%s" % pset)
Example #49
                   help='Execution mode for performing RK4 computation')
    p.add_argument('-p', '--particles', type=int, default=10,
                   help='Number of particles to advect')
    p.add_argument('-v', '--verbose', action='store_true', default=False,
                   help='Print particle information before and after execution')
    p.add_argument('--profiling', action='store_true', default=False,
                   help='Print profiling information after run')
    p.add_argument('-g', '--grid', type=int, nargs=2, default=None,
                   help='Generate grid file with given dimensions')
    p.add_argument('-m', '--method', choices=('RK4', 'EE'), default='RK4',
                   help='Numerical method used for advection')
    args = p.parse_args()

    # Generate grid files according to given dimensions
    if args.grid is not None:
        grid = moving_eddies_grid(args.grid[0], args.grid[1])

    grid.add_field(CreateInitialPositionField(grid))

    ParticleClass = JITParticle if args.mode == 'jit' else Particle

    print(grid.fields)

    pset = grid.ParticleSet(size=args.particles, pclass=ParticleClass,
                            start_field=grid.Start)

    dt = delta(seconds=800)
    pset.execute(AdvectionRK4, endtime=delta(days=25), dt=dt,
                 output_file=pset.ParticleFile(name="ReleaseTestParticle"),
                 output_interval=12 * dt)
Example #50
pset = stommel_pset(fset, npart=1e6)

comm.Barrier()
if rank == 0:
    tac_pset = clock.time()
comm.Barrier()

kernel = AdvectionRK4 + pset.Kernel(UpdateP)

p = ArgumentParser(description="""
blablabla""")
p.add_argument('-j', '--job_id', type=int, default=1, help='job_id')
args = p.parse_args()

comm.Barrier()
if rank == 0:
    toc = clock.time()
comm.Barrier()

pset.execute(kernel, runtime=delta(days=365 * 50), dt=delta(hours=1))

comm.Barrier()
if rank == 0:
    tac = clock.time()
    ofile = 'stommel_mpi_scaling_long_1e6_50years.log'
    f = open(ofile, 'a')
    f.write('CPU time on %02d procs is: %g (%g %g) job id: %d\n' %
            (size, tac - tic, tac - toc, tac_pset - tic_pset, args.job_id))
    f.close()
Example #51
                  'time': 'Time'}
    if use_xarray:
        ds = xr.open_mfdataset([filenames['U'], filenames['V']])
        return FieldSet.from_xarray_dataset(ds, variables, dimensions, allow_time_extrapolation=True, deferred_load=deferred_load)
    else:
        return FieldSet.from_netcdf(filenames, variables, dimensions, allow_time_extrapolation=True, deferred_load=deferred_load)


@pytest.mark.parametrize('use_xarray', [True, False])
def test_ofam_fieldset_fillvalues(use_xarray):
    fieldset = set_ofam_fieldset(deferred_load=False, use_xarray=use_xarray)
    # V.data[0, 0, 150] is a landpoint, that makes NetCDF4 generate a masked array, instead of an ndarray
    assert(fieldset.V.data[0, 0, 150] == 0)


@pytest.mark.parametrize('dt', [delta(minutes=-5), delta(minutes=5)])
def test_ofam_xarray_vs_netcdf(dt):
    fieldsetNetcdf = set_ofam_fieldset(use_xarray=False)
    fieldsetxarray = set_ofam_fieldset(use_xarray=True)
    lonstart, latstart, runtime = (180, 10, delta(days=7))

    psetN = ParticleSet(fieldsetNetcdf, pclass=JITParticle, lon=lonstart, lat=latstart)
    psetN.execute(AdvectionRK4, runtime=runtime, dt=dt)

    psetX = ParticleSet(fieldsetxarray, pclass=JITParticle, lon=lonstart, lat=latstart)
    psetX.execute(AdvectionRK4, runtime=runtime, dt=dt)

    assert np.allclose(psetN[0].lon, psetX[0].lon)
    assert np.allclose(psetN[0].lat, psetX[0].lat)

Example #52
from parcels import FieldSet, Field, AdvectionRK4, ParticleSet, JITParticle, plotTrajectoriesFile, Variable, BrownianMotion2D, random
from parcels import ErrorCode
import numpy as np
from glob import glob
import time as timelib
from datetime import timedelta as delta
from datetime import datetime as datetime
import cartopy
import os
from operator import attrgetter

data_dir = ' /srv/scratch/z3097808/20year_run/20year_freerun_output_NEWnci/'

npart = 100  # number of particles to be released
repeatdt = delta(days=1)  # release from the same set of locations every X day

# Forward: 9
lon_array = [
    153.8072, 153.5873, 153.5460, 153.6929, 153.7817, 153.7955, 153.7790,
    153.7062, 153.5131
]
lat_array = [-26.0, -26.5, -27.0, -27.5, -28.0, -28.5, -29.0, -29.50, -30.00]

# Backwards: 13
#lon_array = [150.8550, 151.4167, 152.8444, 150.2451, 153.7313, 153.7861, 148.9148, 150.1600, 150.3833, 153.0958, 153.3182, 153.8036, 153.6422]
#lat_array = [-35.1, -33.8, -32, -36.2, -29.4, -28.1, -38, -37, -35.7, -31.4, -30.4, -28.8, -27.3]

#lon = lon_array[array_ref] * np.ones(npart)
#lat = lat_array[array_ref] * np.ones(npart)
Example #53
 def test_was_published_recently_with_future_question(self):
     time = timezone.now() + datetime.timedelta(days=30)
     future_question = Question(pub_date=time)
     self.assertIs(future_question.was_published_recently(), False)
Example #54
    def show_velocity(self, **kwargs):
        # time at which to plot velocity
        t = kwargs.get('t', None)
        if t is None:
            t = self.particles[0].time
        # flag for drawing land on the plot
        land = kwargs.get('land', False)
        # plot domain latitude longitude parameters
        latN = kwargs.get('latN', np.nan)
        latS = kwargs.get('latS', np.nan)
        lonE = kwargs.get('lonE', np.nan)
        lonW = kwargs.get('lonW', np.nan)
        # maximum speed for vector coloring
        vmax = kwargs.get('vmax', None)
        # filename to save to
        savefile = kwargs.get('savefile', None)

        if isinstance(t, datetime):
            t = (t - self.grid.U.time_origin).total_seconds()
        if isinstance(t, delta):
            t = t.total_seconds()

        if not np.isnan(latN):
            latN = nearest_index(self.grid.U.lat, latN)
            latS = nearest_index(self.grid.U.lat, latS)
            lonE = nearest_index(self.grid.U.lon, lonE)
            lonW = nearest_index(self.grid.U.lon, lonW)
            lon = self.grid.U.lon[lonW:lonE]
            lat = self.grid.U.lat[latS:latN]
        else:
            lon = self.grid.U.lon
            lat = self.grid.U.lat

        # time interpolation of velocity field
        idx = self.grid.U.time_index(t)
        U = np.array(self.grid.U.temporal_interpolate_fullfield(idx, t))
        V = np.array(self.grid.V.temporal_interpolate_fullfield(idx, t))
        if not np.isnan(latN):
            U = U[latS:latN, lonW:lonE]
            V = V[latS:latN, lonW:lonE]

        # configuring plot
        lat_median = np.median(lat)
        lon_median = np.median(lon)
        plt.figure()
        m = Basemap(projection='merc', lat_0=lat_median, lon_0=lon_median,
                    resolution='h', area_thresh=100,
                    llcrnrlon=lon[0], llcrnrlat=lat[0],
                    urcrnrlon=lon[-1], urcrnrlat=lat[-1])
        if land:
            m.drawcoastlines()
            m.fillcontinents(color='burlywood')
        parallels = np.arange(lat[0], lat[-1], abs(lat[0]-lat[-1])/5)
        parallels = np.around(parallels, 2)
        m.drawparallels(parallels, labels=[1, 0, 0, 0])
        meridians = np.arange(lon[0], lon[-1], abs(lon[0]-lon[-1])/5)
        meridians = np.around(meridians, 2)
        m.drawmeridians(meridians, labels=[0, 0, 0, 1])

        # formatting velocity data for quiver plotting
        U = np.array([U[y, x] for x in range(len(lon)) for y in range(len(lat))])
        V = np.array([V[y, x] for x in range(len(lon)) for y in range(len(lat))])
        speed = np.sqrt(U**2 + V**2)
        normU = U/speed
        normV = V/speed
        x = np.repeat(lon, len(lat))
        y = np.tile(lat, len(lon))
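        # (equivalently, assuming the same column-major ordering as the loops above:
        #  U.ravel(order='F'), V.ravel(order='F'))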

        # plotting velocity vector field
        vecs = m.quiver(x, y, normU, normV, speed, cmap=plt.cm.gist_ncar, clim=[0, vmax], scale=50, latlon=True)
        cb = m.colorbar(vecs, "right", size="5%", pad="2%")
        plon = [p.lon for p in self]
        plat = [p.lat for p in self]
        xs, ys = m(plon, plat)
        # plotting particle data
        m.scatter(xs, ys, color='black')

        if self.grid.U.time_origin == 0:
            plt.title(delta(seconds=t))
        else:
            plt.title(netCDF4.num2date(t, 'seconds since '+str(self.grid.U.time_origin)))

        if savefile is None:
            plt.show()
        else:
            plt.savefig(savefile)
            plt.close()
Exemplo n.º 55
0
    def _save(self, action, general=True):
        """
        Stores data relating to the recovery of a fragment for this request
        """

        super(FragmentSink, self)._save(action)

        # Override general parameter
        general = general and action.request.allow_generalisation

        # Fragment collection parameters
        requested_updating_delay = action.request.updating_delay
        if action.request.updating_delay is None:
            requested_updating_delay = MIN_SYNC_TIME
        self._pipe.hset(self._request_key, 'updating_delay', requested_updating_delay)
        self._pipe.hset(self._request_key, 'allow_generalisation', action.request.allow_generalisation)

        # Recover pattern from the request object
        self._graph_pattern = action.request.pattern

        effective_gp = self._generalize_gp() if general else self._graph_pattern

        # fragment_mapping is a tuple like (fragment_id, mapping)
        fragment_mapping = self.__check_gp_mappings(gp=effective_gp)
        exists = fragment_mapping is not None

        # Decide whether to proceed depending on whether this request has been passed before
        # and on whether the fragment is already known
        proceed = action.id in self.passed_requests or (
            random() > 1.0 - PASS_THRESHOLD if not exists else random() > PASS_THRESHOLD)
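        # (an unknown fragment proceeds with probability PASS_THRESHOLD, while an
        #  already-known fragment proceeds with probability 1 - PASS_THRESHOLD)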
        if not proceed:
            self.do_pass(action)
        if action.id in self.passed_requests:
            self.passed_requests.remove(action.id)

        lock = None
        try:
            if not exists:
                # If there is no mapping, register a new fragment collection for the general graph pattern
                fragment_id = str(uuid())
                self._fragment_key = self.__f_key_pattern.format(fragment_id)
                self._pipe.sadd(self._fragments_key, fragment_id)
                self._pipe.sadd('{}:gp'.format(self._fragment_key), *effective_gp)
                mapping = {str(k): str(k) for k in action.request.variable_labels}
                mapping.update({str(k): str(k) for k in self._filter_mapping})
            else:
                fragment_id, mapping = fragment_mapping
                self._fragment_key = self.__f_key_pattern.format(fragment_id)
                lock = fragment_lock(fragment_id)
                lock.acquire()
                # Remove the sync state if the fragment is in on-demand mode
                if r.get('{}:on_demand'.format(self._fragment_key)) is not None:
                    self._pipe.delete('{}:sync'.format(self._fragment_key))

            # Here the following is persisted: mapping, pref_labels, fragment-request links and the original
            # graph_pattern
            self._pipe.hmset('{}map'.format(self._request_key), mapping)
            if action.request.preferred_labels:
                self._pipe.sadd('{}pl'.format(self._request_key), *action.request.preferred_labels)
            self._pipe.sadd('{}:requests'.format(self._fragment_key), self._request_id)
            self._pipe.hset(self._request_key, 'fragment_id', fragment_id)
            self._pipe.sadd('{}gp'.format(self._request_key), *self._graph_pattern)
            self._pipe.hset(self._request_key, 'pattern', ' . '.join(self._graph_pattern))

            # Update collection parameters
            fragment_synced = True
            current_updated = r.get('{}:updated'.format(self._fragment_key))
            if current_updated is not None:
                current_updated = dt.utcfromtimestamp(float(current_updated))
                utcnow = dt.utcnow()
                limit = utcnow - delta(seconds=requested_updating_delay)
                if limit > current_updated:
                    diff = (limit - current_updated).total_seconds()
                    self._pipe.delete('{}:sync'.format(self._fragment_key))
                    fragment_synced = False
                    # if diff > requested_updating_delay / 2.0:
                    #     self._pipe.delete('{}:updated'.format(self._fragment_key))

            current_updating_delay = int(
                r.get('{}:ud'.format(self._fragment_key))) if exists and fragment_synced else sys.maxint
            if current_updating_delay > requested_updating_delay:
                self._pipe.set('{}:ud'.format(self._fragment_key), requested_updating_delay)

            current_on_events = r.get('{}:events'.format(self._fragment_key))
            requested_on_events = action.request.update_on_events
            if current_on_events is None or (current_on_events is not None and current_on_events == 'True'):
                self._pipe.set('{}:events'.format(self._fragment_key), requested_on_events)

            # Update fragment request history
            # if not fragment_synced:
            #     self._pipe.delete('{}:hist'.format(self._fragment_key))
            self._pipe.lpush('{}:hist'.format(self._fragment_key), calendar.timegm(dt.utcnow().timetuple()))
            self._pipe.ltrim('{}:hist'.format(self._fragment_key), 0, 3)

            # Populate attributes that may be required during the rest of the submission process
            self._dict_fields['mapping'] = mapping
            self._dict_fields['preferred_labels'] = action.request.preferred_labels
            self._dict_fields['fragment_id'] = fragment_id

            if not exists:
                _log.info('Request {} has started a new fragment collection: {}'.format(self._request_id, fragment_id))
            else:
                _log.info('Request {} is going to re-use fragment {}'.format(self._request_id, fragment_id))
                n_fragment_reqs = r.scard('{}:requests'.format(self._fragment_key))
                _log.info('Fragment {} is supporting {} more requests'.format(fragment_id, n_fragment_reqs))
        finally:
            if lock is not None:
                lock.release()
Exemplo n.º 56
0
                   default='stationary',
                   help='Generate fieldset file with given dimensions')
    p.add_argument('-m',
                   '--method',
                   choices=('RK4', 'EE', 'RK45'),
                   default='RK4',
                   help='Numerical method used for advection')
    args = p.parse_args()
    filename = 'analytical_eddies'

    # Generate fieldset files according to chosen test setup
    if args.fieldset == 'stationary':
        fieldset = fieldset_stationary()
    elif args.fieldset == 'moving':
        fieldset = fieldset_moving()
    elif args.fieldset == 'decaying':
        fieldset = fieldset_decaying()

    npart = args.particles
    pset = ParticleSet(fieldset,
                       pclass=ptype[args.mode],
                       lon=np.linspace(4000, 21000, npart, dtype=np.float32),
                       lat=np.linspace(12500, 12500, npart, dtype=np.float32))
    if args.verbose:
        print("Initial particle positions:\n%s" % pset)
    pset.execute(kernel[args.method],
                 dt=delta(minutes=3),
                 runtime=delta(hours=6))
    if args.verbose:
        print("Final particle positions:\n%s" % pset)
Exemplo n.º 57
0
    if fieldset.galapagosmask[time, particle.depth, particle.lat, particle.lon] == 4:
        particle.visitedgalapagos = 4
   
    
# additional features of the particles
class GalapagosParticle(JITParticle):
    visitedgalapagos = Variable('visitedgalapagos', initial=0.)
    age = Variable('age', initial=0.)
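
# Minimal sketches of the Age and DeleteParticle kernels used below (assumed here;
# the originals are defined earlier in the full script and may differ):
def Age(particle, fieldset, time):
    particle.age = particle.age + particle.dt  # accumulate particle age in seconds

def DeleteParticle(particle, fieldset, time):
    particle.delete()  # drop particles that trigger an out-of-bounds error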
    
    
# set particle conditions
pset = ParticleSet(fieldset=fieldset,
                   pclass=GalapagosParticle,
                   lon=startlon,
                   lat=startlat,
                   repeatdt=delta(days=5))

outfile = pset.ParticleFile(name=fname, outputdt=delta(days=1))
kernels = pset.Kernel(AdvectionRK4) + pset.Kernel(Age) + pset.Kernel(SampleGalapagos)

pset.execute(kernels,
             runtime=delta(days=366),
             dt=delta(hours=1),
             output_file=outfile,
             recovery={ErrorCode.ErrorOutOfBounds: DeleteParticle})

pset.repeatdt = None

pset.execute(kernels,
             runtime=delta(days=180),
             dt=delta(hours=1),
Exemplo n.º 58
0
    target = today_open + (yesterday_high - yesterday_low)  # compute the volatility-breakout target price
    return target


'''
Sell function
'''
def sell_crypto_currency(ticker):
    unit = upbit.get_balance(ticker)  # quantity of the coin currently held
    upbit.sell_market_order(ticker, unit)  # sell the coin at market price

'''
Every day at the set hour, recompute and refresh the target price
'''
while True:
    try:
        now = datetime.datetime.now()  # get the current time
        mid = datetime.datetime(now.year, now.month, now.day, 9)  # fix the reference point at 09:00
        if mid < now < mid + datetime.timedelta(seconds=10):  # the exact second cannot be hit reliably, so act within a 10-second window
            sell_crypto_currency("KRW-BTC")  # sell all holdings
            target_price = get_target_price("KRW-BTC")  # recompute the target price


        current_price = pyupbit.get_current_price("KRW-BTC")  # get the current price
        if current_price > target_price:  # check whether the current price exceeds the target
            buy_crypto_currency("KRW-BTC")  # buy
    except:
        print("An error occurred")
        time.sleep(1)

Exemplo n.º 59
0
bithumb = pybithumb.Bithumb(con_key, sec_key)

def get_target_price(ticker):
    df = pybithumb.get_ohlcv(ticker)
    yesterday = df.iloc[-2]

    today_open = yesterday['close']
    yesterday_high = yesterday['high']
    yesterday_low = yesterday['low']
    target = today_open + (yesterday_high - yesterday_low) * 0.5
    return target
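
# Worked example of the breakout target: if yesterday closed at 100 with a high of 110
# and a low of 90, today's open is taken to be 100 and
# target = 100 + (110 - 90) * 0.5 = 110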

now = datetime.datetime.now()
mid = datetime.datetime(now.year, now.month, now.day) + datetime.timedelta(1)
target_price = get_target_price("BTC")

while True:
    now = datetime.datetime.now()
    if mid < now < mid + datetime.timedelta(seconds=10):
        target_price = get_target_price("BTC")
        mid = datetime.datetime(now.year, now.month, now.day) + datetime.timedelta(1)

    current_price = pybithumb.get_current_price("BTC")
    if current_price > target_price:
        krw = bithumb.get_balance("BTC")[2]
        orderbook = pybithumb.get_orderbook("BTC")
        sell_price = orderbook['asks'][0]['price']      
        unit = krw/float(sell_price)
        bithumb.buy_market_order("BTC", unit)

    time.sleep(1)
import os
import pandas as pd
import pandas.tools as tl
import pymongo
import sys
from datetime import datetime as dtm
from datetime import timedelta as delta


def date_handler(obj):
    return obj.isoformat() if hasattr(obj, 'isoformat') else obj
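
# A sketch of how date_handler is typically used, e.g. when serialising query results
# to JSON (json module assumed): json.dumps(record, default=date_handler)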

end = dtm.now()
endmidnight = end.replace(hour=0, minute=0, second=0, microsecond=0)

# For testing purposes the duration is reduced from one day to a few hours
#start = end + delta(days=-1)
start = end + delta(hours=-8)

# The MongoDB instance should not be set to require a username and password for access
MONGO_URL = os.environ['MONGO_URL']

try:
    conn = pymongo.Connection(MONGO_URL, safe=True)
except pymongo.errors.ConnectionFailure as e:
    print 'Error: check your MongoDB connectivity'
    print 'Error:', e
    sys.exit()

# Databases to use
db2 = conn.collectd

# Collections to use