def test_z_axis_method(self):
    """Convert raw binary files, build datasets with z_axis_method=2, and
    verify the produced profile netCDF files and their compliance."""
    merger = SlocumMerger(
        self.binary_path,
        self.ascii_path,
        cache_directory=self.cache_path,
        globs=['unit_507-2021-308*'],
    )
    merger.convert()

    # Build one dataset per converted ASCII (.dat) file
    for dat_name in [f for f in os.listdir(self.ascii_path) if f.endswith('.dat')]:
        create_dataset(
            file=os.path.join(self.ascii_path, dat_name),
            reader_class=SlocumReader,
            deployments_path=resource('slocum'),
            subset=True,
            template='slocum_dac',
            profile_id_type=1,
            tsint=10,
            filter_distance=1,
            filter_points=5,
            filter_time=10,
            filter_z=1,
            z_axis_method=2,
        )

    assert os.path.exists(self.netcdf_path)

    outputs = [
        os.path.join(self.netcdf_path, name)
        for name in sorted(os.listdir(self.netcdf_path))
    ]
    assert len(outputs) == 28

    # First profile
    with nc4.Dataset(outputs[0]) as ncd:
        assert ncd.variables['profile_id'].ndim == 0
        assert ncd.variables['profile_id'][0] == 1636072703

    # Last profile
    with nc4.Dataset(outputs[-1]) as ncd:
        assert ncd.variables['profile_id'].ndim == 0
        assert ncd.variables['profile_id'][0] == 1636146248

    # Every output file must pass the compliance check
    Arguments = namedtuple('Arguments', ['file'])
    for path in outputs:
        assert check_dataset(Arguments(file=path)) == 0
def test_defaults(self):
    """Build trajectory datasets from a single realtime ASCII file and
    verify profile count, first/last profile ids, and compliance."""
    out_base = resource('slocum', 'bass-test-ascii', 'rt', 'netcdf')
    try:
        create_dataset(
            file=resource('slocum', 'bass-test-ascii', 'rt', 'ascii',
                          'usf_bass_2016_253_0_6_sbd.dat'),
            reader_class=SlocumReader,
            deployments_path=resource('slocum'),
            subset=False,
            template='trajectory',
            profile_id_type=1,
            tsint=10,
            filter_distance=1,
            filter_points=5,
            filter_time=10,
            filter_z=1,
        )

        outputs = [
            os.path.join(out_base, name)
            for name in sorted(os.listdir(out_base))
        ]
        assert len(outputs) == 32

        # First profile
        with nc4.Dataset(outputs[0]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == 1473499526

        # Last profile
        with nc4.Dataset(outputs[-1]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == 1473509128

        # Every output file must pass the compliance check
        Arguments = namedtuple('Arguments', ['file'])
        for path in outputs:
            assert check_dataset(Arguments(file=path)) == 0
    finally:
        # Cleanup generated netCDF output
        shutil.rmtree(out_base)
def test_all_ascii(self):
    """Build ioos_ngdac datasets from every matching realtime ASCII file;
    profile ids are sequential ordinals (profile_id_type=2)."""
    out_base = resource('slocum', 'bass-test-ascii', 'rt', 'netcdf')
    try:
        for ascii_file in glob(resource('slocum', 'bass-test-ascii', 'rt',
                                        'ascii', 'usf_bass*.dat')):
            create_dataset(
                file=ascii_file,
                reader_class=SlocumReader,
                deployments_path=resource('slocum'),
                subset=False,
                template='ioos_ngdac',
                profile_id_type=2,
                tsint=10,
                filter_distance=1,
                filter_points=5,
                filter_time=10,
                filter_z=1,
            )

        outputs = [
            os.path.join(out_base, name)
            for name in sorted(os.listdir(out_base))
        ]

        # First profile gets ordinal id 0
        with nc4.Dataset(outputs[0]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == 0

        # Last profile gets ordinal id count-1
        with nc4.Dataset(outputs[-1]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == len(outputs) - 1

        # Every output file must pass the compliance check
        Arguments = namedtuple('Arguments', ['file'])
        for path in outputs:
            assert check_dataset(Arguments(file=path)) == 0
    finally:
        # Cleanup generated netCDF output
        shutil.rmtree(out_base)
def test_all_ascii(self):
    """Build ioos_ngdac datasets (config_path/output_path API) from every
    matching ASCII file; profile ids are sequential ordinals."""
    out_base = resource('slocum', 'real', 'netcdf', 'bass-20160909T1733')
    safe_makedirs(out_base)

    for ascii_file in glob(resource('slocum', 'usf_bass*.dat')):
        create_dataset(
            file=ascii_file,
            reader_class=SlocumReader,
            config_path=resource('slocum', 'config', 'bass-20160909T1733'),
            output_path=out_base,
            subset=False,
            template='ioos_ngdac',
            profile_id_type=2,
            tsint=10,
            filter_distance=1,
            filter_points=5,
            filter_time=10,
            filter_z=1,
        )

    outputs = [
        os.path.join(out_base, name)
        for name in sorted(os.listdir(out_base))
    ]

    # First profile gets ordinal id 0
    with nc4.Dataset(outputs[0]) as ncd:
        assert ncd.variables['profile_id'].ndim == 0
        assert ncd.variables['profile_id'][0] == 0

    # Last profile gets ordinal id count-1
    with nc4.Dataset(outputs[-1]) as ncd:
        assert ncd.variables['profile_id'].ndim == 0
        assert ncd.variables['profile_id'][0] == len(outputs) - 1

    # Every output file must pass the compliance check
    Arguments = namedtuple('Arguments', ['file'])
    for path in outputs:
        assert check_dataset(Arguments(file=path)) == 0
def test_parameter_filters_override_config(self):
    """Explicit filter arguments must override deployment config values;
    filter_z=32 reduces the output to exactly one profile."""
    out_base = resource('slocum', 'bass-test-filters-override', 'rt', 'netcdf')
    try:
        # This filters to a single profile
        create_dataset(
            file=resource('slocum', 'bass-test-filters-override', 'rt',
                          'ascii', 'usf_bass_2016_253_0_6_sbd.dat'),
            reader_class=SlocumReader,
            deployments_path=resource('slocum'),
            subset=True,
            template='ioos_ngdac',
            profile_id_type=1,
            tsint=None,
            filter_distance=None,
            filter_points=None,
            filter_time=None,
            filter_z=32,
        )

        outputs = [
            os.path.join(out_base, name)
            for name in sorted(os.listdir(out_base))
        ]
        assert len(outputs) == 1

        # Only profile
        with nc4.Dataset(outputs[0]) as ncd:
            assert ncd.variables['profile_id'].ndim == 0
            assert ncd.variables['profile_id'][0] == 1473507417

        # Every output file must pass the compliance check
        Arguments = namedtuple('Arguments', ['file'])
        for path in outputs:
            assert check_dataset(Arguments(file=path)) == 0
    finally:
        # Cleanup generated netCDF output
        shutil.rmtree(out_base)
def test_delayed(self):
    """Build trajectory datasets from a delayed-mode (dbd) ASCII file and
    verify profile count, first/last profile ids, and compliance."""
    out_base = resource('slocum', 'real', 'netcdf', 'modena-2015')
    create_dataset(
        file=resource('slocum', 'modena_2015_175_0_9_dbd.dat'),
        reader_class=SlocumReader,
        config_path=resource('slocum', 'config', 'modena-2015'),
        output_path=out_base,
        subset=False,
        template='trajectory',
        profile_id_type=1,
        tsint=10,
        filter_distance=1,
        filter_points=5,
        filter_time=10,
        filter_z=1,
    )

    outputs = [
        os.path.join(out_base, name)
        for name in sorted(os.listdir(out_base))
    ]
    assert len(outputs) == 6

    # First profile
    with nc4.Dataset(outputs[0]) as ncd:
        assert ncd.variables['profile_id'].ndim == 0
        assert ncd.variables['profile_id'][0] == 1435257435

    # Last profile
    with nc4.Dataset(outputs[-1]) as ncd:
        assert ncd.variables['profile_id'].ndim == 0
        assert ncd.variables['profile_id'][0] == 1435264145

    # Every output file must pass the compliance check
    Arguments = namedtuple('Arguments', ['file'])
    for path in outputs:
        assert check_dataset(Arguments(file=path)) == 0
def test_defaults(self):
    """Build trajectory datasets (config_path/output_path API) from a single
    realtime ASCII file and verify profiles and compliance."""
    out_base = resource('slocum', 'real', 'netcdf', 'bass-20160909T1733')
    create_dataset(
        file=resource('slocum', 'usf_bass_2016_253_0_6_sbd.dat'),
        reader_class=SlocumReader,
        config_path=resource('slocum', 'config', 'bass-20160909T1733'),
        output_path=out_base,
        subset=False,
        template='trajectory',
        profile_id_type=1,
        tsint=10,
        filter_distance=1,
        filter_points=5,
        filter_time=10,
        filter_z=1,
    )

    outputs = [
        os.path.join(out_base, name)
        for name in sorted(os.listdir(out_base))
    ]
    assert len(outputs) == 32

    # First profile
    with nc4.Dataset(outputs[0]) as ncd:
        assert ncd.variables['profile_id'].ndim == 0
        assert ncd.variables['profile_id'][0] == 1473499507

    # Last profile
    with nc4.Dataset(outputs[-1]) as ncd:
        assert ncd.variables['profile_id'].ndim == 0
        assert ncd.variables['profile_id'][0] == 1473509118

    # Every output file must pass the compliance check
    Arguments = namedtuple('Arguments', ['file'])
    for path in outputs:
        assert check_dataset(Arguments(file=path)) == 0
def process_IN_MOVED_TO(self, event):
    """Handle a file moved into the watched directory: upload it only when
    its extension is acceptable and it passes the compliance check."""
    Arguments = namedtuple('Check_Arguments', ['file'])
    check_args = Arguments(file=event.pathname)
    if self.valid_extension(event.name) and check_dataset(check_args) == 0:
        self.upload_file(event)
def test_failing_testing_compliance(self):
    """A known-bad netCDF file must fail the compliance check (exit code 1)."""
    assert check_dataset(self.args(file=resource('should_fail.nc'))) == 1
def process_IN_MOVED_TO(self, event):
    """Handle a file moved into the watched directory: upload it once the
    compliance check passes."""
    check_args = SimpleNamespace(file=event.pathname)
    if check_dataset(check_args) == 0:
        self.upload_file(event)