Exemplo n.º 1
0
    def test_appending_variables(self):
        """Feed three netCDF files through netcdf_to_erddap_dataset and check
        that variables introduced mid-stream are kept in datasets.xml."""
        datasets_path = resource('erddap', 'datasets.xml')
        sources = [
            resource('erddap', 'fakedeployment', 'rt', 'netcdf', '1.nc'),  # Creates datasets.xml
            resource('erddap', 'fakedeployment', 'rt', 'netcdf', '2.nc'),  # Adds additional variables
            resource('erddap', 'fakedeployment', 'rt', 'netcdf', '3.nc'),  # Should not remove any variables
        ]

        for source in sources:
            netcdf_to_erddap_dataset(resource('erddap'), datasets_path, source, None)

        root = etree.parse(datasets_path).getroot()
        dataset = etree.XPath("//erddapDatasets/dataset")(root)[0]
        names = [dv.findtext('sourceName') for dv in dataset.iter('dataVariable')]

        # Temperature is only in 2.nc and not 1.nc or 3.nc. Make sure
        # it was carried through correctly
        for expected in ('temperature', 'conductivity', 'salinity', 'density'):
            assert expected in names
Exemplo n.º 2
0
    def test_large_merge(self):
        """Merge the 'large' fixture folder into one netCDF and validate it."""
        source_folder = resource('slocum', 'merge', 'large')
        merged_path = resource('slocum', 'merge', 'output', 'large.nc')
        merge_profile_netcdf_files(source_folder, merged_path)

        with ContiguousRaggedTrajectoryProfile(merged_path) as ncd:
            assert ncd.is_valid()
Exemplo n.º 3
0
 def setUp(self):
     """Resolve the modena-20150625T0000 delayed-mode fixture paths."""
     super().setUp()
     deployment = 'modena-20150625T0000'
     self.binary_path = resource('slocum', deployment, 'delayed', 'binary')
     self.ascii_path = resource('slocum', deployment, 'delayed', 'ascii')
     self.cache_path = resource('slocum', deployment, 'config')
Exemplo n.º 4
0
    def test_gutils_netcdf_to_erddap_watch(self):
        """Watch the netCDF directory and verify that dropping a netCDF file
        produces ERDDAP content and a flag file via Netcdf2ErddapProcessor."""

        wm = WatchManager()
        # Fire on files moved into, or closed after writing in, the watch dir
        mask = IN_MOVED_TO | IN_CLOSE_WRITE

        # Process new netCDF files into ERDDAP content/flag output
        processor = Netcdf2ErddapProcessor(
            deployments_path=resource('slocum'),
            erddap_content_path=erddap_content_path,
            erddap_flag_path=erddap_flag_path
        )
        notifier = ThreadedNotifier(wm, processor, read_freq=5)
        notifier.coalesce_events()
        notifier.start()

        wdd = wm.add_watch(
            netcdf_path,
            mask,
            rec=True,
            auto_add=True
        )

        # Wait 5 seconds for the watch to start
        time.sleep(5)

        # Drop a copy of a known-good profile into the watched directory
        orig_netcdf = resource('profile.nc')
        dummy_netcdf = os.path.join(netcdf_path, 'profile.nc')
        shutil.copy(orig_netcdf, dummy_netcdf)

        # Block until the processor has emitted one content and one flag file
        wait_for_files(erddap_content_path, 1)
        wait_for_files(erddap_flag_path, 1)

        # NOTE(review): rm_watch is handed a dict view of watch descriptors;
        # pyinotify examples pass a list — confirm across pyinotify versions
        wm.rm_watch(wdd.values(), rec=True)
        notifier.stop()
Exemplo n.º 5
0
    def setUp(self):
        """Resolve the bass-20150407T1300 real-deployment fixture paths."""
        super(TestSlocumMerger, self).setUp()

        deployment = 'bass-20150407T1300'
        self.binary_path = resource('slocum', 'real', 'binary', deployment)
        self.ascii_path = resource('slocum', 'real', 'ascii', deployment)
Exemplo n.º 6
0
def test_real_deployments(deployment):
    """End-to-end rt-mode conversion for one real deployment fixture.

    Merges the deployment's binary files to ASCII with SlocumMerger, then
    runs create_dataset over each produced ASCII file. Generated ASCII and
    netCDF directories and cached .cac files are always removed afterwards.

    ``deployment`` is the fixture directory name under resource('slocum').
    """
    setup_testing_logger(level=logging.WARNING)
    binary_path = resource('slocum', deployment, 'rt', 'binary')
    ascii_path = resource('slocum', deployment, 'rt', 'ascii')
    netcdf_path = resource('slocum', deployment, 'rt', 'netcdf')
    config_path = resource('slocum', deployment, 'config')

    # Static args shared by every create_dataset call; 'file' is set per file
    args = dict(reader_class=SlocumReader,
                deployments_path=resource('slocum'),
                subset=True,
                template='ioos_ngdac',
                profile_id_type=2,
                filter_distance=1,
                filter_points=5,
                filter_time=10,
                filter_z=1)

    try:
        merger = SlocumMerger(
            binary_path,
            ascii_path,
            cache_directory=config_path,
        )
        for p in merger.convert():
            # Each convert() result carries the path of the ASCII it produced
            args['file'] = p['ascii']
            create_dataset(**args)
    finally:
        # Cleanup
        shutil.rmtree(ascii_path, ignore_errors=True)  # Remove generated ASCII
        shutil.rmtree(netcdf_path,
                      ignore_errors=True)  # Remove generated netCDF
        # Remove any cached .cac files
        for cac in glob(os.path.join(binary_path, '*.cac')):
            os.remove(cac)
Exemplo n.º 7
0
 def setUp(self):
     """Resolve the unit_507_one delayed-mode fixture paths."""
     super().setUp()
     glider = 'unit_507_one'
     self.binary_path = resource('slocum', glider, 'delayed', 'binary')
     self.ascii_path = resource('slocum', glider, 'delayed', 'ascii')
     self.netcdf_path = resource('slocum', glider, 'delayed', 'netcdf')
     self.cache_path = resource('slocum', glider, 'config')
Exemplo n.º 8
0
    def test_gutils_ascii_to_netcdf_watch(self):
        """Watch the ASCII directory and verify Slocum2NetcdfProcessor turns
        newly merged ASCII files into netCDF output."""

        wm = WatchManager()
        # Fire on files moved into, or closed after writing in, the watch dir
        mask = IN_MOVED_TO | IN_CLOSE_WRITE

        # Convert ASCII data to NetCDF
        processor = Slocum2NetcdfProcessor(deployments_path=resource('slocum'),
                                           subset=False,
                                           template='trajectory',
                                           profile_id_type=2,
                                           tsint=10,
                                           filter_distance=1,
                                           filter_points=5,
                                           filter_time=10,
                                           filter_z=1)
        notifier = ThreadedNotifier(wm, processor)
        notifier.coalesce_events()
        notifier.start()

        wdd = wm.add_watch(ascii_path, mask, rec=True, auto_add=True)

        # Wait 5 seconds for the watch to start
        time.sleep(5)

        # Make the ASCII we are watching for
        merger = SlocumMerger(original_binary,
                              ascii_path,
                              globs=['*.tbd', '*.sbd'])
        merger.convert()

        # Block until the expected 230 netCDF files have been produced
        wait_for_files(netcdf_path, 230)

        # NOTE(review): rm_watch is handed a dict view of watch descriptors;
        # pyinotify examples pass a list — confirm across pyinotify versions
        wm.rm_watch(wdd.values(), rec=True)
        notifier.stop()
Exemplo n.º 9
0
    def test_gutils_binary_to_ascii_watch(self):
        """Watch the binary directory and verify Slocum2AsciiProcessor merges
        copied-in binary files into ASCII output."""

        wm = WatchManager()
        # Fire on files moved into, or closed after writing in, the watch dir
        mask = IN_MOVED_TO | IN_CLOSE_WRITE

        # Convert binary data to ASCII
        processor = Slocum2AsciiProcessor(
            deployments_path=resource('slocum'),
        )
        notifier = ThreadedNotifier(wm, processor)
        notifier.coalesce_events()
        notifier.start()

        wdd = wm.add_watch(
            binary_path,
            mask,
            rec=True,
            auto_add=True
        )

        # Wait 5 seconds for the watch to start
        time.sleep(5)

        gpath = os.path.join(original_binary, '*.*bd')
        # Sort the files so the .cac files are generated in the right order
        for g in sorted(glob(gpath)):
            shutil.copy2(g, binary_path)

        # Block until the expected 32 ASCII files have been produced
        wait_for_files(ascii_path, 32)

        # NOTE(review): rm_watch is handed a dict view of watch descriptors;
        # pyinotify examples pass a list — confirm across pyinotify versions
        wm.rm_watch(wdd.values(), rec=True)
        notifier.stop()
Exemplo n.º 10
0
 def tearDown(self):
     """Remove any netCDF output generated during the test run."""
     for directory in [resource('slocum', 'real', 'netcdf')]:
         try:
             shutil.rmtree(directory)
         except (IOError, OSError):
             # The directory may never have been created; ignore
             pass
Exemplo n.º 11
0
    def test_all_ascii(self):
        """Convert every bass-test-ascii .dat file and verify sequential
        profile ids (profile_id_type=2) plus compliance of each output."""
        out_base = resource('slocum', 'bass-test-ascii', 'rt', 'netcdf')

        try:
            dat_files = glob(resource('slocum', 'bass-test-ascii', 'rt', 'ascii', 'usf_bass*.dat'))
            for dat_file in dat_files:
                create_dataset(
                    file=dat_file,
                    reader_class=SlocumReader,
                    deployments_path=resource('slocum'),
                    subset=False,
                    template='ioos_ngdac',
                    profile_id_type=2,
                    tsint=10,
                    filter_distance=1,
                    filter_points=5,
                    filter_time=10,
                    filter_z=1,
                )

            output_files = [
                os.path.join(out_base, name)
                for name in sorted(os.listdir(out_base))
            ]

            # profile_id_type=2 numbers profiles sequentially from zero
            with nc4.Dataset(output_files[0]) as ncd:
                assert ncd.variables['profile_id'].ndim == 0
                assert ncd.variables['profile_id'][0] == 0

            with nc4.Dataset(output_files[-1]) as ncd:
                assert ncd.variables['profile_id'].ndim == 0
                assert ncd.variables['profile_id'][0] == len(output_files) - 1

            # Check netCDF file for compliance
            Arguments = namedtuple('Arguments', ['file'])
            for output_file in output_files:
                assert check_dataset(Arguments(file=output_file)) == 0

        finally:
            # Cleanup
            shutil.rmtree(out_base)
Exemplo n.º 12
0
    def test_defaults(self):
        """Convert a single bass segment with the trajectory template and
        epoch-second profile ids, then verify ids and compliance."""
        out_base = resource('slocum', 'bass-test-ascii', 'rt', 'netcdf')

        try:
            create_dataset(
                file=resource('slocum', 'bass-test-ascii', 'rt', 'ascii', 'usf_bass_2016_253_0_6_sbd.dat'),
                reader_class=SlocumReader,
                deployments_path=resource('slocum'),
                subset=False,
                template='trajectory',
                profile_id_type=1,
                tsint=10,
                filter_distance=1,
                filter_points=5,
                filter_time=10,
                filter_z=1,
            )

            output_files = [
                os.path.join(out_base, name)
                for name in sorted(os.listdir(out_base))
            ]
            assert len(output_files) == 32

            # profile_id_type=1 uses the profile's epoch timestamp as its id
            with nc4.Dataset(output_files[0]) as ncd:
                assert ncd.variables['profile_id'].ndim == 0
                assert ncd.variables['profile_id'][0] == 1473499526

            with nc4.Dataset(output_files[-1]) as ncd:
                assert ncd.variables['profile_id'].ndim == 0
                assert ncd.variables['profile_id'][0] == 1473509128

            # Check netCDF file for compliance
            Arguments = namedtuple('Arguments', ['file'])
            for output_file in output_files:
                assert check_dataset(Arguments(file=output_file)) == 0

        finally:
            # Cleanup
            shutil.rmtree(out_base)
Exemplo n.º 13
0
    def test_all_ascii(self):
        """Convert every usf_bass .dat fixture with an explicit config/output
        path and verify sequential profile ids plus compliance."""
        out_base = resource('slocum', 'real', 'netcdf', 'bass-20160909T1733')
        safe_makedirs(out_base)

        for dat_file in glob(resource('slocum', 'usf_bass*.dat')):
            create_dataset(
                file=dat_file,
                reader_class=SlocumReader,
                config_path=resource('slocum', 'config', 'bass-20160909T1733'),
                output_path=out_base,
                subset=False,
                template='ioos_ngdac',
                profile_id_type=2,
                tsint=10,
                filter_distance=1,
                filter_points=5,
                filter_time=10,
                filter_z=1,
            )

        output_files = [
            os.path.join(out_base, name)
            for name in sorted(os.listdir(out_base))
        ]

        # profile_id_type=2 numbers profiles sequentially from zero
        with nc4.Dataset(output_files[0]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == 0

        with nc4.Dataset(output_files[-1]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == len(output_files) - 1

        # Check netCDF file for compliance
        Arguments = namedtuple('Arguments', ['file'])
        for output_file in output_files:
            assert check_dataset(Arguments(file=output_file)) == 0
Exemplo n.º 14
0
    def test_parameter_filters_override_config(self):
        """Explicit filter kwargs must take precedence over the deployment
        configuration; these overrides reduce the output to one profile."""
        out_base = resource('slocum', 'bass-test-filters-override', 'rt',
                            'netcdf')

        try:
            # This filters to a single profile
            create_dataset(
                file=resource('slocum', 'bass-test-filters-override',
                              'rt', 'ascii',
                              'usf_bass_2016_253_0_6_sbd.dat'),
                reader_class=SlocumReader,
                deployments_path=resource('slocum'),
                subset=True,
                template='ioos_ngdac',
                profile_id_type=1,
                tsint=None,
                filter_distance=None,
                filter_points=None,
                filter_time=None,
                filter_z=32,
            )

            output_files = [
                os.path.join(out_base, name)
                for name in sorted(os.listdir(out_base))
            ]
            assert len(output_files) == 1

            # Only profile
            with nc4.Dataset(output_files[0]) as ncd:
                assert ncd.variables['profile_id'].ndim == 0
                assert ncd.variables['profile_id'][0] == 1473507417

            # Check netCDF file for compliance
            Arguments = namedtuple('Arguments', ['file'])
            for output_file in output_files:
                assert check_dataset(Arguments(file=output_file)) == 0

        finally:
            # Cleanup
            shutil.rmtree(out_base)
Exemplo n.º 15
0
def test_real_deployments(deployment):
    """End-to-end conversion for one real deployment under slocum/real.

    Merges binary files to ASCII, then runs create_dataset on each produced
    ASCII file. ``GUTILS_TEST_CONFIG_DIRECTORY`` may point at a directory
    holding one configuration subdirectory per deployment; otherwise the
    bundled resource('slocum', 'real', 'config') tree is used. Generated
    ASCII, netCDF, and cached .cac files are always removed afterwards.
    """
    setup_testing_logger(level=logging.WARNING)
    binary_path = resource('slocum', 'real', 'binary', deployment)
    ascii_path = resource('slocum', 'real', 'ascii', deployment)
    netcdf_path = resource('slocum', 'real', 'netcdf', deployment)
    # Base directory containing one config subdirectory per deployment.
    # BUGFIX: the default previously already ended in the deployment name,
    # so the join below produced .../config/<deployment>/<deployment>.
    default_configs = resource('slocum', 'real', 'config')

    # Config path is usually an env variable pointing to a configuration setup
    all_config_path = os.environ.get('GUTILS_TEST_CONFIG_DIRECTORY',
                                     default_configs)
    config_path = os.path.join(all_config_path, deployment)

    # Static args shared by every create_dataset call; 'file' is set per file
    args = dict(reader_class=SlocumReader,
                config_path=config_path,
                output_path=netcdf_path,
                subset=True,
                template='ioos_ngdac',
                profile_id_type=2,
                filter_distance=1,
                filter_points=5,
                filter_time=10,
                filter_z=1)

    try:
        merger = SlocumMerger(binary_path, ascii_path)
        for p in merger.convert():
            # Each convert() result carries the path of the ASCII it produced
            args['file'] = p['ascii']
            create_dataset(**args)
    finally:
        # Cleanup
        shutil.rmtree(ascii_path, ignore_errors=True)  # Remove generated ASCII
        shutil.rmtree(netcdf_path,
                      ignore_errors=True)  # Remove generated netCDF
        # Remove any cached .cac files
        for cac in glob(os.path.join(binary_path, '*.cac')):
            os.remove(cac)
Exemplo n.º 16
0
    def test_delayed(self):
        """Convert a delayed-mode modena .dat file and verify six profiles,
        their epoch-second ids, and compliance."""
        out_base = resource('slocum', 'real', 'netcdf', 'modena-2015')

        create_dataset(
            file=resource('slocum', 'modena_2015_175_0_9_dbd.dat'),
            reader_class=SlocumReader,
            config_path=resource('slocum', 'config', 'modena-2015'),
            output_path=out_base,
            subset=False,
            template='trajectory',
            profile_id_type=1,
            tsint=10,
            filter_distance=1,
            filter_points=5,
            filter_time=10,
            filter_z=1,
        )

        output_files = [
            os.path.join(out_base, name)
            for name in sorted(os.listdir(out_base))
        ]
        assert len(output_files) == 6

        # profile_id_type=1 uses the profile's epoch timestamp as its id
        with nc4.Dataset(output_files[0]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == 1435257435

        with nc4.Dataset(output_files[-1]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == 1435264145

        # Check netCDF file for compliance
        Arguments = namedtuple('Arguments', ['file'])
        for output_file in output_files:
            assert check_dataset(Arguments(file=output_file)) == 0
Exemplo n.º 17
0
    def test_defaults(self):
        """Convert a single bass rt segment with explicit config/output paths
        and the trajectory template, then verify ids and compliance."""
        out_base = resource('slocum', 'real', 'netcdf', 'bass-20160909T1733')
        create_dataset(
            file=resource('slocum', 'usf_bass_2016_253_0_6_sbd.dat'),
            reader_class=SlocumReader,
            config_path=resource('slocum', 'config', 'bass-20160909T1733'),
            output_path=out_base,
            subset=False,
            template='trajectory',
            profile_id_type=1,
            tsint=10,
            filter_distance=1,
            filter_points=5,
            filter_time=10,
            filter_z=1,
        )

        output_files = [
            os.path.join(out_base, name)
            for name in sorted(os.listdir(out_base))
        ]
        assert len(output_files) == 32

        # profile_id_type=1 uses the profile's epoch timestamp as its id
        with nc4.Dataset(output_files[0]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == 1473499507

        with nc4.Dataset(output_files[-1]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == 1473509118

        # Check netCDF file for compliance
        Arguments = namedtuple('Arguments', ['file'])
        for output_file in output_files:
            assert check_dataset(Arguments(file=output_file)) == 0
Exemplo n.º 18
0
    def test_z_axis_method(self):
        """Merge one unit_507 segment glob and convert with z_axis_method=2,
        then verify the profile count, first/last ids, and compliance."""
        merger = SlocumMerger(self.binary_path,
                              self.ascii_path,
                              cache_directory=self.cache_path,
                              globs=['unit_507-2021-308*'])
        merger.convert()

        for ascii_file in os.listdir(self.ascii_path):
            if not ascii_file.endswith('.dat'):
                continue
            create_dataset(
                file=os.path.join(self.ascii_path, ascii_file),
                reader_class=SlocumReader,
                deployments_path=resource('slocum'),
                subset=True,
                template='slocum_dac',
                profile_id_type=1,
                tsint=10,
                filter_distance=1,
                filter_points=5,
                filter_time=10,
                filter_z=1,
                z_axis_method=2,
            )

        assert os.path.exists(self.netcdf_path)

        output_files = [
            os.path.join(self.netcdf_path, name)
            for name in sorted(os.listdir(self.netcdf_path))
        ]
        assert len(output_files) == 28

        # profile_id_type=1 uses the profile's epoch timestamp as its id
        with nc4.Dataset(output_files[0]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == 1636072703

        with nc4.Dataset(output_files[-1]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == 1636146248

        # Check netCDF file for compliance
        Arguments = namedtuple('Arguments', ['file'])
        for output_file in output_files:
            assert check_dataset(Arguments(file=output_file)) == 0
Exemplo n.º 19
0
    def test_pseudogram(self):
        """Merge the full binary fixture and convert with z_axis_method=1,
        then verify the profile count and the first/last profile ids."""
        merger = SlocumMerger(self.binary_path,
                              self.ascii_path,
                              cache_directory=self.cache_path,
                              globs=['*'])
        merger.convert()

        for ascii_file in os.listdir(self.ascii_path):
            if not ascii_file.endswith('.dat'):
                continue
            create_dataset(
                file=os.path.join(self.ascii_path, ascii_file),
                reader_class=SlocumReader,
                deployments_path=resource('slocum'),
                subset=True,
                template='slocum_dac',
                profile_id_type=1,
                tsint=10,
                filter_distance=1,
                filter_points=5,
                filter_time=10,
                filter_z=1,
                z_axis_method=1,
            )

        assert os.path.exists(self.netcdf_path)

        output_files = [
            os.path.join(self.netcdf_path, name)
            for name in sorted(os.listdir(self.netcdf_path))
        ]
        assert len(output_files) == 17

        # profile_id_type=1 uses the profile's epoch timestamp as its id
        with nc4.Dataset(output_files[0]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == 1639020410

        with nc4.Dataset(output_files[-1]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == 1639069272
Exemplo n.º 20
0
 def setUp(self):
     """Resolve the modena-2015 real-deployment fixture paths."""
     super(TestSlocumExportDelayed, self).setUp()
     deployment = 'modena-2015'
     self.binary_path = resource('slocum', 'real', 'binary', deployment)
     self.ascii_path = resource('slocum', 'real', 'ascii', deployment)
     # Cache (.cac) files live alongside the binaries
     self.cache_path = os.path.join(self.binary_path, 'cac')
Exemplo n.º 21
0
    WatchManager
)

from gutils import safe_makedirs
from gutils.slocum import SlocumMerger
from gutils.watch.binary import Slocum2AsciiProcessor
from gutils.watch.ascii import Slocum2NetcdfProcessor
from gutils.watch.netcdf import Netcdf2ErddapProcessor
from gutils.tests import resource, output, GutilsTestClass

import logging
L = logging.getLogger(__name__)  # noqa

# Deployment fixture name shared by every watch test in this module
deployment = 'bass-test-watch'

# Watched rt-mode directories for this deployment
binary_path     = resource('slocum', deployment, 'rt', 'binary')
ascii_path      = resource('slocum', deployment, 'rt', 'ascii')
netcdf_path     = resource('slocum', deployment, 'rt', 'netcdf')
# Source binaries copied into the watch directory to trigger processing
original_binary = resource('slocum', 'bass-20160909T1733', 'rt', 'binary')
config_path     = resource('slocum', deployment, 'config')

# Output locations the watch processors write to
erddap_content_path = output('erddap', 'content')
erddap_flag_path = output('erddap', 'flag')
ftp_path = output('ftp')


def wait_for_files(path, number, loops=20, sleep=6):
    # Wait for NetCDF to be created
    count = 0
    while True:
        try:
Exemplo n.º 22
0
def is_continuous(profiled_dataset):
    """Return True when profile numbers never skip a value.

    Walks the rows in order; a jump of more than one between the last seen
    profile id and the current row's profile id is printed and treated as
    an inconsistency. Repeated ids (jump of zero) are allowed.
    """
    previous_profile = 0
    for index, record in enumerate(profiled_dataset.itertuples()):
        jump = abs(previous_profile - record.profile)
        if jump == 1:
            previous_profile = record.profile
        elif jump > 1:
            print("Inconsistency @: %d, Last Profile: %d, Current: %d" %
                  (index, previous_profile, record.profile))
            return False

    return True


# Single rt-mode Slocum ASCII segment used as the CTD input for profile tests
ctd_filepath = resource('slocum', 'bass-test-ascii', 'rt', 'ascii',
                        'usf_bass_2016_253_0_6_sbd.dat')


class TestFindProfile(GutilsTestClass):
    """Profile assignment on a single standardized Slocum segment."""

    def setUp(self):
        super(TestFindProfile, self).setUp()

        reader = SlocumReader(ctd_filepath)
        self.df = reader.standardize()
        self.profiled_dataset = assign_profiles(self.df, tsint=10)

    def test_find_profile(self):
        """The profiled frame keeps every row and yields 32 distinct profiles."""
        assert len(self.profiled_dataset) != 0
        assert len(self.profiled_dataset) == len(self.df)
        assert len(self.profiled_dataset.profile.dropna().unique()) == 32
Exemplo n.º 23
0
from glob import glob

from pyinotify import (IN_CLOSE_WRITE, IN_MOVED_TO, ThreadedNotifier,
                       WatchManager)

from gutils import safe_makedirs
from gutils.slocum import SlocumMerger
from gutils.watch.binary import Slocum2AsciiProcessor
from gutils.watch.ascii import Slocum2NetcdfProcessor
from gutils.watch.netcdf import Netcdf2ErddapProcessor
from gutils.tests import resource, output, GutilsTestClass

import logging
L = logging.getLogger(__name__)  # noqa

# Fixture configuration and source binaries for bass-20160909T1733
config_path = resource('slocum', 'config', 'bass-20160909T1733')
original_binary = resource('slocum', 'real', 'binary', 'bass-20160909T1733')
# Output directories watched and written by the processors under test
binary_path = output('binary', 'bass-20160909T1733')
ascii_path = output('ascii', 'bass-20160909T1733')
netcdf_path = output('netcdf', 'bass-20160909T1733')
erddap_content_path = output('erddap', 'content')
erddap_flag_path = output('erddap', 'flag')
ftp_path = output('ftp')


def wait_for_files(path, number):
    # Wait for NetCDF to be created
    count = 0
    loops = 20
    while True:
        try:
Exemplo n.º 24
0
 def setUp(self):
     """Resolve the bass-20150407T1300 rt-mode fixture paths."""
     super().setUp()
     deployment = 'bass-20150407T1300'
     self.binary_path = resource('slocum', deployment, 'rt', 'binary')
     self.ascii_path = resource('slocum', deployment, 'rt', 'ascii')
Exemplo n.º 25
0
 def setUp(self):
     """Resolve the ecometrics3 rt-mode fixture paths."""
     super().setUp()
     deployment = 'ecometrics3'
     self.binary_path = resource('slocum', deployment, 'rt', 'binary')
     self.ascii_path = resource('slocum', deployment, 'rt', 'ascii')
     self.netcdf_path = resource('slocum', deployment, 'rt', 'netcdf')
     self.cache_path = resource('slocum', deployment, 'config')
Exemplo n.º 26
0
 def setUp(self):
     """Resolve the bass-20160909T1733 rt-mode fixture paths."""
     super().setUp()
     deployment = 'bass-20160909T1733'
     self.binary_path = resource('slocum', deployment, 'rt', 'binary')
     self.ascii_path = resource('slocum', deployment, 'rt', 'ascii')
Exemplo n.º 27
0
 def test_failing_testing_compliance(self):
     """A known-bad netCDF file must fail the compliance check."""
     failing_args = self.args(file=resource('should_fail.nc'))
     assert check_dataset(failing_args) == 1
Exemplo n.º 28
0
 def setUp(self):
     """Resolve the bass-20160909T1733 real-deployment fixture paths."""
     super(TestSlocumReaderWithGPS, self).setUp()
     deployment = 'bass-20160909T1733'
     self.binary_path = resource('slocum', 'real', 'binary', deployment)
     self.ascii_path = resource('slocum', 'real', 'ascii', deployment)