def test_load_ambiguous_data(tmp_path):
    """Check that load() raises YTAmbiguousDataType when two unrelated
    Dataset subclasses both claim the same input as valid."""
    # We deliberately set up a situation where two Dataset subclasses
    # that aren't parents are considered valid.
    class FakeDataset(Dataset):
        @classmethod
        def _is_valid(cls, *args, **kwargs):
            return True

    class FakeDataset2(Dataset):
        @classmethod
        def _is_valid(cls, *args, **kwargs):
            return True

    try:
        with pytest.raises(YTAmbiguousDataType):
            load(tmp_path)
    finally:
        # Tear down to avoid possible breakage in following tests.
        # Pass a default so a failed registration doesn't raise KeyError
        # here and mask the actual test failure.
        output_type_registry.pop("FakeDataset", None)
        output_type_registry.pop("FakeDataset2", None)
def test_load_ambiguous_data():
    """Nose-style twin of the pytest variant: load() on an empty temp dir
    must raise YTAmbiguousDataType when two unrelated Dataset subclasses
    both claim validity."""
    # We deliberately set up a situation where two Dataset subclasses
    # that aren't parents are considered valid.
    class FakeDataset(Dataset):
        @classmethod
        def _is_valid(cls, *args, **kwargs):
            return True

    class FakeDataset2(Dataset):
        @classmethod
        def _is_valid(cls, *args, **kwargs):
            return True

    # NOTE: the original wrapped this in `except Exception: raise`, which
    # is a no-op re-raise; try/finally alone gives the same semantics.
    try:
        with tempfile.TemporaryDirectory() as tmpdir:
            assert_raises(YTAmbiguousDataType, load, tmpdir)
    finally:
        # Tear down to avoid possible breakage in following tests; use a
        # default so a failed registration doesn't mask the real error.
        output_type_registry.pop("FakeDataset", None)
        output_type_registry.pop("FakeDataset2", None)
def ambiguous_dataset_classes():
    """Fixture generator: register two sibling Dataset subclasses that both
    claim validity, so load() hits an ambiguous resolution; always clean the
    registry afterwards."""
    # We deliberately set up a situation where two Dataset subclasses
    # that aren't parents are considered valid.
    # We implement the bare minimum for these classes to be actually
    # loadable in order to test hints.
    class MockHierarchy(GridIndex):
        pass

    class MockDataset(Dataset):
        _index_class = MockHierarchy

        def _parse_parameter_file(self, *args, **kwargs):
            self.current_time = -1.0
            self.cosmological_simulation = 0

        def _set_code_unit_attributes(self, *args, **kwargs):
            self.length_unit = self.quan(1, "m")
            self.mass_unit = self.quan(1, "kg")
            self.time_unit = self.quan(1, "s")

    class AlphaDataset(MockDataset):
        @classmethod
        def _is_valid(cls, *args, **kwargs):
            return True

    class BetaDataset(MockDataset):
        @classmethod
        def _is_valid(cls, *args, **kwargs):
            return True

    # try/finally guarantees teardown even when an exception is thrown
    # into the generator (e.g. a failing test body).
    try:
        yield
    finally:
        # Teardown to avoid possible breakage in following tests; pass a
        # default so a partial setup doesn't raise KeyError here.
        output_type_registry.pop("MockDataset", None)
        output_type_registry.pop("AlphaDataset", None)
        output_type_registry.pop("BetaDataset", None)
def test_init_fake_dataseries():
    """Exercise DatasetSeries construction from str/Path glob patterns and
    str/Path lists, rejection of bogus input, and lazy load failures."""
    file_list = [f"fake_data_file_{str(i).zfill(4)}" for i in range(10)]
    with tempfile.TemporaryDirectory() as tmpdir:
        pfile_list = [Path(tmpdir) / file for file in file_list]
        sfile_list = [str(file) for file in pfile_list]
        for file in pfile_list:
            file.touch()
        ppattern = Path(tmpdir) / "fake_data_file_*"
        # Build the str form explicitly: the original passed the Path object
        # for the "str pattern" case, so that code path was never tested.
        pattern = str(ppattern)

        # init from str pattern
        ts = DatasetSeries(pattern)
        assert ts._pre_outputs == sfile_list

        # init from Path pattern
        ts = DatasetSeries(ppattern)
        assert ts._pre_outputs == sfile_list

        # init from str list
        ts = DatasetSeries(sfile_list)
        assert ts._pre_outputs == sfile_list
        # init from Path list
        ts = DatasetSeries(pfile_list)
        assert ts._pre_outputs == pfile_list

        # rejected input type (str repr of a list) "[file1, file2, ...]"
        assert_raises(FileNotFoundError, DatasetSeries, str(file_list))

        # finally, check that ts[0] fails to actually load
        assert_raises(YTUnidentifiedDataType, ts.__getitem__, 0)

        class FakeDataset(Dataset):
            """A minimal loadable fake dataset subclass"""

            @classmethod
            def _is_valid(cls, *args, **kwargs):
                return True

            def _parse_parameter_file(self):
                return

            def _set_code_unit_attributes(self):
                return

            def set_code_units(self):
                self.current_time = 0
                return

            def _hash(self):
                return

            def _setup_classes(self):
                return

        try:
            ds = DatasetSeries(pattern)[0]
            assert isinstance(ds, FakeDataset)

            ts = DatasetSeries(pattern, my_unsupported_kwarg=None)
            assert_raises(TypeError, ts.__getitem__, 0)
            # the exact error message is supposed to be this
            # """__init__() got an unexpected keyword argument 'my_unsupported_kwarg'"""
            # but it's hard to check for within the framework
        finally:
            # Tear down to avoid possible breakage in following tests; use a
            # default so a failed registration doesn't mask the real error.
            output_type_registry.pop("FakeDataset", None)