Code Example #1
File: test_pytide.py  Project: CNES/pangeo-pytide
    def test_degraded(self):
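        # compute_nodal_modulations accepts a sequence of datetime.datetime or
        # numpy.datetime64 values; other argument types must raise an error.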
        with netCDF4.Dataset(self.DATASET) as dataset:
            time = dataset['time'][:] * 1e-6
            h = dataset['ocean'][:] * 1e-2

        wt = pytide.WaveTable()

        wt.compute_nodal_modulations(
            [datetime.datetime(2012, 1, 1),
             datetime.datetime(2012, 1, 2)])
        wt.compute_nodal_modulations(
            numpy.array([
                numpy.datetime64("2012-01-01"),
                numpy.datetime64("2012-01-02")
            ]))

        with self.assertRaises(TypeError):
            wt.compute_nodal_modulations(datetime.datetime(2012, 1, 1))

        with self.assertRaises(TypeError):
            wt.compute_nodal_modulations(time)

        with self.assertRaises(TypeError):
            wt.compute_nodal_modulations([3])

        with self.assertRaises(ValueError):
            wt.compute_nodal_corrections(3)
Code Example #2
def main():
    args = usage()
    setup_logging(args.log)

    if args.local_cluster:
        client = dask.distributed.Client(
            dask.distributed.LocalCluster(threads_per_worker=1))
    else:
        client = dask.distributed.Client(scheduler_file=args.scheduler_file)
    logging.info(client)

    # Reading the list of files and associated dates to
    # be processed.
    time_series = t_axis(args.dirname)
    period = (time_series >= args.start_date) & (time_series <= args.end_date)
    logging.info("number of files to process %d", len(time_series[period]))
    logging.info("period [%s, %s]", time_series[period].min(),
                 time_series[period].max())

    wave_table = pytide.WaveTable(args.tidal_constituents)
    logging.info("%d tidal constituents to be analysed", len(wave_table))

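    # Compute the nodal modulations (f, v0u) of the selected constituents
    # for the dates retained in the analysis period.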
    f, v0u = compute_nodal_modulations(client, wave_table, time_series[period])

    if not os.path.exists(args.result):
        # Create the result file
        logging.info("create the result file %r", args.result)
        create_result(args.template.name,
                      args.result,
                      args.variable,
                      wave_table,
                      chunk=args.var_chunk)
    else:
        # Otherwise, check that the result file can be opened before continuing.
        with netCDF4.Dataset(args.result):
            pass

    del wave_table
    del time_series

    # Process each face of the grid: load its time series, rechunk it,
    # run the harmonic analysis and write the result.
    for face in args.face:
        logging.info("processing face %d", face)
        ds = load_faces(args.dirname,
                        face,
                        args.variable,
                        period,
                        chunk=args.var_chunk)
        logging.info("loaded %s", dask_array_properties(ds))
        ds = ds.rechunk(dask_array_rechunk(ds, args.nblocks))
        logging.info("fragmented %s", dask_array_properties(ds))
        future = apply_along_axis(pytide.WaveTable.harmonic_analysis, 2, ds,
                                  *(f, v0u))
        result = future.compute()
        result = numpy.transpose(result, [2, 0, 1])
        logging.info("write face #%d", face)
        write_one_face(args.result, result, face, args.variable)
        logging.info("calculation completed for face #%d", face)
        client.cancel(ds)

    logging.info("calculation done")
Code Example #3
File: tipe.py  Project: laspg/itide_ops
def harmonic_analysis(df, ssh_key, min_count=100, constituents=None):

    if constituents is None:
        _cst = ["M2", "K1", "O1", "P1", "Q1", "S1"]
    else:
        _cst = constituents

    if df.empty or df.size < min_count:
        return pd.DataFrame([[0. for c in _cst]], columns=_cst)
    else:
        # preliminary info
        time = df['time'].to_numpy(dtype="datetime64[us]")
        wt = pytide.WaveTable(_cst)
        f, vu = wt.compute_nodal_modulations(time)

        # get harmonics
        w = wt.harmonic_analysis(df[ssh_key].to_numpy(), f, vu)

        # predicted tidal contribution
        #hp = wt.tide_from_tide_series(time, w)

        return pd.DataFrame([w], columns=_cst)
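A minimal usage sketch (not part of the original file): the helper above can be applied per group of a pandas DataFrame, for example one sea-surface-height series per station. The input file name, the "station" group key and the "ssh" column name are assumptions made for illustration.

import pandas as pd

# Hypothetical input: one row per observation, with "time", "ssh" and "station" columns.
df = pd.read_parquet("ssh_series.parquet")

# harmonic_analysis returns one row of amplitudes per constituent for each station.
amplitudes = df.groupby("station").apply(harmonic_analysis, ssh_key="ssh")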
Code Example #4
File: test_pytide.py  Project: CNES/pangeo-pytide
    def test_wave(self):
        aa = pytide.AstronomicAngle(datetime.datetime(2000, 1, 1))
        wt = pytide.WaveTable(["M2"])
        wave = wt.wave("M2")
        self.assertAlmostEqual(wave.freq * 86400,
                               12.140833182614747,
                               delta=1e-6)
        self.assertEqual(wave.type, wave.TidalType.kShortPeriod)
Code Example #5
File: test_pytide.py  Project: CNES/pangeo-pytide
    def test_init(self):
        wt = pytide.WaveTable()
        self.assertEqual(len(wt), 67)
        self.assertEqual(len([item for item in wt]), 67)
        self.assertEqual(wt.wave("M2"), wt.wave(pytide.Wave.Ident.kM2))
        self.assertNotEqual(wt.wave("M2"), wt.wave(pytide.Wave.Ident.kK1))
        self.assertTrue(wt.wave("__M2__") is None)
        self.assertListEqual(sorted(wt.known_constituents()),
                             sorted([item.name() for item in wt]))
        for item in wt:
            self.assertEqual(item.ident,
                             getattr(pytide.Wave.Ident, "k" + item.name()))

        wt = pytide.WaveTable(["M2", "K1", "O1", "P1", "Q1", "S1"])
        self.assertEqual(len(wt), 6)
        self.assertListEqual(sorted([item.name() for item in wt]),
                             sorted(["M2", "K1", "O1", "P1", "Q1", "S1"]))
Code Example #6
File: test_pytide.py  Project: CNES/pangeo-pytide
    def test_analysis(self):
        with netCDF4.Dataset(self.DATASET) as dataset:
            time = (dataset['time'][:] * 1e-6).astype("datetime64[s]")
            h = dataset['ocean'][:] * 1e-2

        wt = pytide.WaveTable()
        f, vu = wt.compute_nodal_modulations(time)
        w = wt.harmonic_analysis(h, f, vu)
        delta = h - wt.tide_from_tide_series(time, w)

        self.assertAlmostEqual(delta.mean(), 0, delta=1e-16)
        self.assertAlmostEqual(delta.std(), 0, delta=1e-12)
Code Example #7
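# Bounding indices of the NATL60 sub-domain selected by the boolean mask domaineNATL60.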
whereeNATL60 = np.where(domaineNATL60)
jmineNATL60 = whereeNATL60[0].min()
jmaxeNATL60 = whereeNATL60[0].max()
imineNATL60 = whereeNATL60[1].min()
imaxeNATL60 = whereeNATL60[1].max()

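# Select the analysis period on the time axis and extract the SSH over the sub-domain.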
START_DATE = np.datetime64('2009-07-01')
END_DATE = np.datetime64('2010-06-30')
time_series = ds['time_counter']
period = (time_series >= START_DATE) & (time_series <= END_DATE)
time = time_series[period]
ssh = ds.sossheig[period, jmineNATL60:jmaxeNATL60 + 1,
                  imineNATL60:imaxeNATL60 + 1]
t = time.values

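# Restrict the analysis to the M2 constituent and compute its nodal modulations.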
wt = pytide.WaveTable(["M2"])
f, vu = wt.compute_nodal_modulations(t)


def dask_array_rechunk(da, axis=0):
    """Search for the optimal block cutting without modifying the axis 'axis'
    in order to optimize its access in memory."""
    nblocks = 1

    def calculate_chuncks_size(chunks, size):
        result = np.array(chunks).prod() * size
        return result / (1000**2)

    while True:
        chunks = []