def test_init_fake_dataseries():
    file_list = [f"fake_data_file_{str(i).zfill(4)}" for i in range(10)]
    with tempfile.TemporaryDirectory() as tmpdir:
        pfile_list = [Path(tmpdir) / file for file in file_list]
        sfile_list = [str(file) for file in pfile_list]
        for file in pfile_list:
            file.touch()
        pattern = os.path.join(tmpdir, "fake_data_file_*")

        # init from str pattern
        ts = DatasetSeries(pattern)
        assert ts._pre_outputs == sfile_list

        # init from Path pattern
        ppattern = Path(pattern)
        ts = DatasetSeries(ppattern)
        assert ts._pre_outputs == sfile_list

        # init from str list
        ts = DatasetSeries(sfile_list)
        assert ts._pre_outputs == sfile_list

        # init from Path list
        ts = DatasetSeries(pfile_list)
        assert ts._pre_outputs == pfile_list

        # rejected input type (str repr of a list) "[file1, file2, ...]"
        assert_raises(FileNotFoundError, DatasetSeries, str(file_list))

        # finally, check that ts[0] fails to actually load
        assert_raises(YTUnidentifiedDataType, ts.__getitem__, 0)

def test_store():
    ds = yt.load(G30)
    store = ds.parameter_filename + ".yt"
    field = "density"
    if os.path.isfile(store):
        os.remove(store)

    proj1 = ds.proj(field, "z")
    sp = ds.sphere(ds.domain_center, (4, "kpc"))
    proj2 = ds.proj(field, "z", data_source=sp)

    proj1_c = ds.proj(field, "z")
    assert_equal(proj1[field], proj1_c[field])

    proj2_c = ds.proj(field, "z", data_source=sp)
    assert_equal(proj2[field], proj2_c[field])

    def fail_for_different_method():
        proj2_c = ds.proj(field, "z", data_source=sp, method="mip")
        assert_equal(proj2[field], proj2_c[field])

    # A note here: a unyt.exceptions.UnitOperationError is raised
    # and caught by numpy, which reraises a ValueError
    assert_raises(ValueError, fail_for_different_method)

    def fail_for_different_source():
        sp = ds.sphere(ds.domain_center, (2, "kpc"))
        proj2_c = ds.proj(field, "z", data_source=sp, method="integrate")
        assert_equal(proj2_c[field], proj2[field])

    assert_raises(AssertionError, fail_for_different_source)

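# A hedged, self-contained sketch (not part of the original suite) of the
# exception-translation note above. It assumes unyt is installed; which
# exception actually escapes can vary across numpy/unyt versions, so we
# catch broadly and only document the expected path.
def _sketch_unit_mismatch_comparison():
    import numpy as np
    from unyt import unyt_array
    from unyt.exceptions import UnitOperationError

    a = unyt_array([1.0, 2.0], "g/cm**2")  # "integrate"-style projection units
    b = unyt_array([1.0, 2.0], "g/cm**3")  # "mip"-style projection units
    try:
        np.testing.assert_equal(a, b)
    except (ValueError, AssertionError, UnitOperationError):
        # per the note above, a UnitOperationError raised inside the ufunc
        # is what numpy historically re-surfaced as a ValueError
        pass
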
def test_store():
    ds = yt.load(G30)
    store = ds.parameter_filename + ".yt"
    field = "density"
    if os.path.isfile(store):
        os.remove(store)

    proj1 = ds.proj(field, "z")
    sp = ds.sphere(ds.domain_center, (4, "kpc"))
    proj2 = ds.proj(field, "z", data_source=sp)

    proj1_c = ds.proj(field, "z")
    assert_equal(proj1[field], proj1_c[field])

    proj2_c = ds.proj(field, "z", data_source=sp)
    assert_equal(proj2[field], proj2_c[field])

    def fail_for_different_method():
        proj2_c = ds.proj(field, "z", data_source=sp, method="mip")
        return np.equal(proj2[field], proj2_c[field]).all()

    assert_raises(YTUnitOperationError, fail_for_different_method)

    def fail_for_different_source():
        sp = ds.sphere(ds.domain_center, (2, "kpc"))
        proj2_c = ds.proj(field, "z", data_source=sp, method="integrate")
        assert_equal(proj2_c[field], proj2[field])

    assert_raises(AssertionError, fail_for_different_source)

def test_validation():
    dims = np.array([4, 2, 4])
    grid_data = [
        dict(
            left_edge=[0.0, 0.0, 0.0],
            right_edge=[1.0, 1.0, 1.0],
            level=0,
            dimensions=dims,
        ),
        dict(
            left_edge=[0.25, 0.25, 0.25],
            right_edge=[0.75, 0.75, 0.75],
            level=1,
            dimensions=dims,
        ),
    ]
    bbox = np.array([[0, 1], [0, 1], [0, 1]])

    def load_grids():
        load_amr_grids(
            grid_data,
            dims,
            bbox=bbox,
            periodicity=(0, 0, 0),
            length_unit=1.0,
            refine_by=2,
        )

    assert_raises(YTIllDefinedAMR, load_grids)

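# Why load_grids() above is ill-defined (an explanatory note, not from the
# original test): with dimensions [4, 2, 4] on the unit box, level-0 cells
# are 0.5 wide along y, so the refined grid's y-edges at 0.25 and 0.75 fall
# mid-cell. The level-1 grid therefore cannot be aligned with its parent's
# cell boundaries, which is the condition YTIllDefinedAMR reports.
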
def test_center_error():
    ds = fake_random_ds(16, nprocs=16)
    with assert_raises(YTFieldNotFound):
        ds.sphere("min_non_existing_field_name", (0.25, "unitary"))
    with assert_raises(YTFieldNotFound):
        ds.sphere("max_non_existing_field_name", (0.25, "unitary"))

def test_add_field_unit_semantics():
    ds = fake_random_ds(16)
    ad = ds.all_data()

    def density_alias(field, data):
        return data["density"].in_cgs()

    def unitless_data(field, data):
        return np.ones(data["density"].shape)

    ds.add_field(
        ("gas", "density_alias_no_units"),
        sampling_type="cell",
        function=density_alias,
    )
    ds.add_field(
        ("gas", "density_alias_auto"),
        sampling_type="cell",
        function=density_alias,
        units="auto",
        dimensions="density",
    )
    ds.add_field(
        ("gas", "density_alias_wrong_units"),
        function=density_alias,
        sampling_type="cell",
        units="m/s",
    )
    ds.add_field(
        ("gas", "density_alias_unparseable_units"),
        sampling_type="cell",
        function=density_alias,
        units="dragons",
    )
    ds.add_field(
        ("gas", "density_alias_auto_wrong_dims"),
        function=density_alias,
        sampling_type="cell",
        units="auto",
        dimensions="temperature",
    )

    assert_raises(YTFieldUnitError, get_data, ds, "density_alias_no_units")
    assert_raises(YTFieldUnitError, get_data, ds, "density_alias_wrong_units")
    assert_raises(YTFieldUnitParseError, get_data, ds, "density_alias_unparseable_units")
    assert_raises(YTDimensionalityError, get_data, ds, "density_alias_auto_wrong_dims")

    dens = ad["density_alias_auto"]
    assert_equal(str(dens.units), "g/cm**3")

    ds.add_field(
        ("gas", "dimensionless"),
        sampling_type="cell",
        function=unitless_data,
    )
    ds.add_field(
        ("gas", "dimensionless_auto"),
        function=unitless_data,
        sampling_type="cell",
        units="auto",
        dimensions="dimensionless",
    )
    ds.add_field(
        ("gas", "dimensionless_explicit"),
        function=unitless_data,
        sampling_type="cell",
        units="",
    )
    ds.add_field(
        ("gas", "dimensionful"),
        sampling_type="cell",
        function=unitless_data,
        units="g/cm**3",
    )

    assert_equal(str(ad["dimensionless"].units), "dimensionless")
    assert_equal(str(ad["dimensionless_auto"].units), "dimensionless")
    assert_equal(str(ad["dimensionless_explicit"].units), "dimensionless")
    assert_raises(YTFieldUnitError, get_data, ds, "dimensionful")

def test_typing_error_detection():
    invalid_schema = {"length_unit": "1m"}

    # this is the error that is raised by unyt on bad input
    assert_raises(RuntimeError, mock_quan, invalid_schema["length_unit"])

    # check that the sanitizer function is able to catch the
    # type issue before passing down to unyt
    assert_raises(TypeError, Dataset._sanitize_units_override, invalid_schema)

def test_minimal_sphere_bad_inputs():
    ds = fake_random_ds(16, nprocs=8, particles=100)
    pos = ds.r[("all", "particle_position")]

    ## Check number of points >= 2
    # -> should fail
    assert_raises(YTException, ds.minimal_sphere, pos[:1, :])
    # -> should not fail
    ds.minimal_sphere(pos[:2, :])

def testCustomField(self):
    msg = f"INFO:yt:Loading plugins from {self.plugin_path}"
    with self.assertLogs("yt", level="INFO") as cm:
        yt.enable_plugins()
        self.assertEqual(cm.output, [msg])

    ds = fake_random_ds(16)
    dd = ds.all_data()
    self.assertEqual(str(dd[("gas", "random")].units), "dimensionless")
    self.assertEqual(dd[("gas", "random")].shape, dd[("gas", "density")].shape)
    assert yt.myfunc() == 12
    assert_raises(AttributeError, getattr, yt, "foobar")

def test_find_points():
    """Main test suite for MatchPoints"""
    num_points = 100
    test_ds = setup_test_ds()
    randx = np.random.uniform(
        low=test_ds.domain_left_edge[0],
        high=test_ds.domain_right_edge[0],
        size=num_points,
    )
    randy = np.random.uniform(
        low=test_ds.domain_left_edge[1],
        high=test_ds.domain_right_edge[1],
        size=num_points,
    )
    randz = np.random.uniform(
        low=test_ds.domain_left_edge[2],
        high=test_ds.domain_right_edge[2],
        size=num_points,
    )
    point_grids, point_grid_inds = test_ds.index._find_points(randx, randy, randz)

    # Build the ground truth by brute force: for each point, the containing
    # grid at the finest level wins.
    grid_inds = np.zeros((num_points), dtype="int64")
    for ind, ixx, iyy, izz in zip(range(num_points), randx, randy, randz):
        pos = np.array([ixx, iyy, izz])
        pt_level = -1
        for grid in test_ds.index.grids:
            if (
                np.all(pos >= grid.LeftEdge)
                and np.all(pos <= grid.RightEdge)
                and grid.Level > pt_level
            ):
                pt_level = grid.Level
                grid_inds[ind] = grid.id - grid._id_offset

    assert_equal(point_grid_inds, grid_inds)

    # Test whether find_points works for lists
    point_grids, point_grid_inds = test_ds.index._find_points(
        randx.tolist(), randy.tolist(), randz.tolist()
    )
    assert_equal(point_grid_inds, grid_inds)

    # Test whether find_points works for a scalar
    ind = random.randint(0, num_points - 1)
    point_grids, point_grid_inds = test_ds.index._find_points(
        randx[ind], randy[ind], randz[ind]
    )
    assert_equal(point_grid_inds, grid_inds[ind])

    # Test that find_points fails properly for coordinate arrays of
    # unequal lengths
    assert_raises(AssertionError, test_ds.index._find_points, [0], 1.0, [2, 3])

def test_get_morton_indices():
    from yt.utilities.lib.geometry_utils import (
        get_morton_indices,
        get_morton_indices_unravel,
    )

    INDEX_MAX_64 = np.uint64(2097151)  # 2**21 - 1

    li = np.arange(6, dtype=np.uint64).reshape((2, 3))
    mi_ans = np.array([10, 229], dtype=np.uint64)
    mi_out = get_morton_indices(li)
    mi_out2 = get_morton_indices_unravel(li[:, 0], li[:, 1], li[:, 2])
    assert_array_equal(mi_out, mi_ans)
    assert_array_equal(mi_out2, mi_ans)

    li[0, :] = INDEX_MAX_64 * np.ones(3, dtype=np.uint64)
    assert_raises(ValueError, get_morton_indices, li)
    assert_raises(ValueError, get_morton_indices_unravel, li[:, 0], li[:, 1], li[:, 2])

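# A pure-Python reference sketch (not part of the original test) showing why
# mi_ans is [10, 229]: a Morton index interleaves the bits of (x, y, z), here
# with x in the most significant slot of each 3-bit group, which is the
# convention the expected values imply. INDEX_MAX_64 is 2**21 - 1 because
# three 21-bit indices interleave into a 63-bit code that fits in a uint64.
def _morton_reference(x, y, z, bits=21):
    code = 0
    for i in range(bits):
        code |= ((z >> i) & 1) << (3 * i)
        code |= ((y >> i) & 1) << (3 * i + 1)
        code |= ((x >> i) & 1) << (3 * i + 2)
    return code


assert _morton_reference(0, 1, 2) == 10  # the rows of np.arange(6).reshape(2, 3)
assert _morton_reference(3, 4, 5) == 229
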
def testCustomField(self):
    plugin_file = os.path.join(CONFIG_DIR, ytcfg.get("yt", "plugin_filename"))
    msg = f"INFO:yt:Loading plugins from {plugin_file}"
    with self.assertLogs("yt", level="INFO") as cm:
        yt.enable_plugins()
        self.assertEqual(cm.output, [msg])

    ds = fake_random_ds(16)
    dd = ds.all_data()
    self.assertEqual(str(dd["random"].units), "dimensionless")
    self.assertEqual(dd["random"].shape, dd["density"].shape)
    assert yt.myfunc() == 12
    assert_raises(AttributeError, getattr, yt, "foobar")

def test_unequal_bin_field_profile(self):
    density = np.random.random(128)
    temperature = np.random.random(127)
    mass = np.random.random((128, 128))

    my_data = {
        ("gas", "density"): density,
        ("gas", "temperature"): temperature,
        ("gas", "mass"): mass,
    }
    fake_ds_med = {"current_time": yt.YTQuantity(10, "Myr")}
    field_types = {field: "gas" for field in my_data.keys()}
    yt.save_as_dataset(fake_ds_med, "mydata.h5", my_data, field_types=field_types)

    ds = yt.load("mydata.h5")

    with assert_raises(YTProfileDataShape):
        yt.PhasePlot(
            ds.data,
            ("gas", "temperature"),
            ("gas", "density"),
            ("gas", "mass"),
        )

def test_normalisations_vel_and_length():
    # test forbidden case: both velocity and temperature are specified as overrides
    overrides = dict(
        length_unit=length_unit,
        velocity_unit=velocity_unit,
        temperature_unit=temperature_unit,
    )
    with assert_raises(ValueError):
        data_dir_load(khi_cartesian_2D, kwargs={"units_override": overrides})

def test_normalisations_too_many_args():
    # test forbidden case: too many arguments (at most 3 are allowed)
    overrides = dict(
        length_unit=length_unit,
        numberdensity_unit=numberdensity_unit,
        temperature_unit=temperature_unit,
        time_unit=time_unit,
    )
    with assert_raises(ValueError):
        data_dir_load(khi_cartesian_2D, kwargs={"units_override": overrides})

def test_unequal_bin_field_profile(self):
    density = np.random.random(128)
    temperature = np.random.random(127)
    cell_mass = np.random.random((128, 128))

    my_data = {
        "density": density,
        "temperature": temperature,
        "cell_mass": cell_mass,
    }
    fake_ds_med = {"current_time": yt.YTQuantity(10, "Myr")}
    yt.save_as_dataset(fake_ds_med, "mydata.h5", my_data)

    ds = yt.load("mydata.h5")

    assert_raises(
        YTProfileDataShape,
        yt.PhasePlot,
        ds.data,
        "temperature",
        "density",
        "cell_mass",
    )

def testCustomField(self):
    plugin_file = os.path.join(CONFIG_DIR, ytcfg.get("yt", "pluginfilename"))
    msg = "INFO:yt:Loading plugins from %s" % plugin_file

    if sys.version_info >= (3, 4, 0):
        with self.assertLogs("yt", level="INFO") as cm:
            yt.enable_plugins()
            self.assertEqual(cm.output, [msg])
    else:
        yt.enable_plugins()

    ds = fake_random_ds(16)
    dd = ds.all_data()
    self.assertEqual(str(dd["random"].units), "dimensionless")
    self.assertEqual(dd["random"].shape, dd["density"].shape)
    assert yt.myfunc() == 4
    assert_raises(AttributeError, getattr, yt, "foobar")

def test_plot_particle_field_error():
    ds = fake_random_ds(32, particles=100)

    field_names = [
        ("all", "particle_mass"),
        [("all", "particle_mass"), ("gas", "density")],
        [("gas", "density"), ("all", "particle_mass")],
    ]

    objects_normals = [
        (SlicePlot, 2),
        (SlicePlot, [1, 1, 1]),
        (ProjectionPlot, 2),
        (OffAxisProjectionPlot, [1, 1, 1]),
    ]

    for object, normal in objects_normals:
        for field_name_list in field_names:
            assert_raises(YTInvalidFieldType, object, ds, normal, field_name_list)

def test_ellipsis_selection():
    ds = fake_amr_ds()
    reg = ds.r[:, :, :]
    ereg = ds.r[...]
    assert_array_equal(reg.fwidth, ereg.fwidth)

    reg = ds.r[(0.5, "cm"), :, :]
    ereg = ds.r[(0.5, "cm"), ...]
    assert_array_equal(reg.fwidth, ereg.fwidth)

    reg = ds.r[:, :, (0.5, "cm")]
    ereg = ds.r[..., (0.5, "cm")]
    assert_array_equal(reg.fwidth, ereg.fwidth)

    assert_raises(IndexError, ds.r.__getitem__, (..., (0.5, "cm"), ...))

def test_invalid_max_level():
    invalid_value_args = (
        (1, None),
        (1, "foo"),
        (1, "bar"),
        (-1, "yt"),
    )
    # should fail with ValueError
    for lvl, convention in invalid_value_args:
        with assert_raises(ValueError):
            yt.load(output_00080, max_level=lvl, max_level_convention=convention)

    invalid_type_args = (
        (1.0, "yt"),  # not an int
        ("invalid", "yt"),
    )
    # should fail with TypeError
    for lvl, convention in invalid_type_args:
        with assert_raises(TypeError):
            yt.load(output_00080, max_level=lvl, max_level_convention=convention)

def test_inconsistent_field_shape():
    def load_field_field_mismatch():
        d = np.random.uniform(size=(32, 32, 32))
        t = np.random.uniform(size=(32, 64, 32))
        data = {"density": d, "temperature": t}
        load_uniform_grid(data, (32, 32, 32))

    assert_raises(YTInconsistentGridFieldShape, load_field_field_mismatch)

    def load_field_grid_mismatch():
        d = np.random.uniform(size=(32, 32, 32))
        t = np.random.uniform(size=(32, 32, 32))
        data = {"density": d, "temperature": t}
        load_uniform_grid(data, (32, 64, 32))

    assert_raises(YTInconsistentGridFieldShapeGridDims, load_field_grid_mismatch)

    def load_particle_fields_mismatch():
        x = np.random.uniform(size=100)
        y = np.random.uniform(size=100)
        z = np.random.uniform(size=200)
        data = {
            "particle_position_x": x,
            "particle_position_y": y,
            "particle_position_z": z,
        }
        load_particles(data)

    assert_raises(YTInconsistentParticleFieldShape, load_particle_fields_mismatch)

def test_reject_invalid_normal_vector():
    ds = fake_amr_ds(geometry="cartesian")
    for ui in [0.0, 1.0, 2.0, 3.0]:
        # acceptable scalar numeric values are restricted to integers;
        # floats might be a sign that something went wrong upstream,
        # e.g., rounding or parsing errors
        assert_raises(TypeError, NormalPlot.sanitize_normal_vector, ds, ui)
    for ui in [
        "X",
        "xy",
        "not-an-axis",
        (0, 0, 0),
        [0, 0, 0],
        np.zeros(3),
        [1, 0, 0, 0],
        [1, 0],
        [1],
        [0],
        3,
        10,
    ]:
        assert_raises(ValueError, NormalPlot.sanitize_normal_vector, ds, ui)

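# A hedged companion sketch, inferred from the rejected cases above rather
# than taken from the original suite: inputs that sanitize_normal_vector is
# expected to accept, namely axis names, the integers 0/1/2, and
# non-degenerate 3-component sequences.
def _sketch_accepted_normal_vectors():
    ds = fake_amr_ds(geometry="cartesian")
    for ui in ["x", "y", "z", 0, 1, 2, (1, 0, 0), [1.0, 0.5, 0.0]]:
        NormalPlot.sanitize_normal_vector(ds, ui)  # should not raise
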
def test_load_ambiguous_data():
    # we deliberately set up a situation where two unrelated Dataset
    # subclasses are both considered valid
    class FakeDataset(Dataset):
        @classmethod
        def _is_valid(cls, *args, **kwargs):
            return True

    class FakeDataset2(Dataset):
        @classmethod
        def _is_valid(cls, *args, **kwargs):
            return True

    try:
        with tempfile.TemporaryDirectory() as tmpdir:
            assert_raises(YTAmbiguousDataType, load, tmpdir)
    finally:
        # tear down to avoid possible breakage in following tests
        output_type_registry.pop("FakeDataset")
        output_type_registry.pop("FakeDataset2")

def test_qt_overflow():
    grid_data = []
    grid_dict = {}
    grid_dict["left_edge"] = [-1.0, -1.0, -1.0]
    grid_dict["right_edge"] = [1.0, 1.0, 1.0]
    grid_dict["dimensions"] = [8, 8, 8]
    grid_dict["level"] = 0
    grid_dict["density"] = np.ones((8, 8, 8))

    grid_data.append(grid_dict)

    domain_dimensions = np.array([8, 8, 8])

    spf = load_amr_grids(grid_data, domain_dimensions)

    def make_proj():
        p = ProjectionPlot(spf, "x", ["density"], center="c", origin="native")
        return p

    assert_raises(YTIntDomainOverflow, make_proj)

def test_load_unidentified_data():
    with tempfile.TemporaryDirectory() as tmpdir:
        empty_file_path = Path(tmpdir) / "empty_file"
        empty_file_path.touch()

        assert_raises(YTOutputNotIdentified, load, tmpdir)
        assert_raises(YTOutputNotIdentified, load, empty_file_path)
        assert_raises(
            YTSimulationNotIdentified,
            simulation,
            tmpdir,
            "unregistered_simulation_type",
        )
        assert_raises(
            YTSimulationNotIdentified,
            simulation,
            empty_file_path,
            "unregistered_simulation_type",
        )

def test_load_nonexistent_data():
    with tempfile.TemporaryDirectory() as tmpdir:
        assert_raises(FileNotFoundError, load, os.path.join(tmpdir, "not_a_file"))
        assert_raises(
            FileNotFoundError, simulation, os.path.join(tmpdir, "not_a_file"), "Enzo"
        )

        # this one is a design choice: it is preferable to report the most
        # important problem in an error message (missing data is worse than a
        # typo in simulation_type), so we make sure the error raised is not
        # YTSimulationNotIdentified
        assert_raises(
            FileNotFoundError,
            simulation,
            os.path.join(tmpdir, "not_a_file"),
            "unregistered_simulation_type",
        )

def test_invalid_schema_detection():
    invalid_key_schemas = [
        {"len_unit": 1.0},  # plain invalid key
        {"lenght_unit": 1.0},  # typo
    ]
    for invalid_schema in invalid_key_schemas:
        assert_raises(ValueError, Dataset._sanitize_units_override, invalid_schema)

    invalid_val_schemas = [
        {"length_unit": [1, 1, 1]},  # len(val) > 2
        {"length_unit": [1, 1, 1, 1, 1]},  # "data type not understood" in unyt
    ]
    for invalid_schema in invalid_val_schemas:
        assert_raises(TypeError, Dataset._sanitize_units_override, invalid_schema)

    # 0 shouldn't make sense
    invalid_number_schemas = [
        {"length_unit": 0},
        {"length_unit": [0]},
        {"length_unit": (0,)},
        {"length_unit": (0, "cm")},
    ]
    for invalid_schema in invalid_number_schemas:
        assert_raises(ValueError, Dataset._sanitize_units_override, invalid_schema)

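# For contrast, a hedged sketch of schemas the sanitizer should accept,
# inferred from the rejected shapes above rather than taken from the
# original suite: a bare number, a (value, "unit") 2-sequence, or a
# unyt_quantity.
def _sketch_valid_units_override():
    from unyt import unyt_quantity

    for valid_schema in (
        {"length_unit": 1.0},
        {"length_unit": (1.0, "cm")},
        {"length_unit": [1.0, "cm"]},
        {"length_unit": unyt_quantity(1.0, "cm")},
    ):
        Dataset._sanitize_units_override(valid_schema)  # should not raise
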
def test_invalid_level():
    # these are the exceptions raised by logging.Logger.setLevel;
    # since they are perfectly clear and readable, we check that nothing else
    # happens in the wrapper
    assert_raises(TypeError, set_log_level, 1.5)
    assert_raises(ValueError, set_log_level, "invalid_level")

def test_load_empty_file(self):
    assert_raises(YTOutputNotIdentified, data_dir_load, "not_a_file")
    assert_raises(YTOutputNotIdentified, data_dir_load, "empty_file")
    assert_raises(YTOutputNotIdentified, data_dir_load, "empty_directory")

def test_particle_profile_negative_field():
    # see Issue #1340
    n_particles = int(1e4)

    ppx, ppy, ppz = np.random.normal(size=[3, n_particles])
    pvx, pvy, pvz = -np.ones((3, n_particles))

    data = {
        "particle_position_x": ppx,
        "particle_position_y": ppy,
        "particle_position_z": ppz,
        "particle_velocity_x": pvx,
        "particle_velocity_y": pvy,
        "particle_velocity_z": pvz,
    }

    bbox = 1.1 * np.array(
        [[min(ppx), max(ppx)], [min(ppy), max(ppy)], [min(ppz), max(ppz)]]
    )
    ds = yt.load_particles(data, bbox=bbox)
    ad = ds.all_data()

    profile = yt.create_profile(
        ad,
        ["particle_position_x", "particle_position_y"],
        "particle_velocity_x",
        logs={
            "particle_position_x": True,
            "particle_position_y": True,
            "particle_position_z": True,
        },
        weight_field=None,
    )
    assert profile["particle_velocity_x"].min() < 0
    assert profile.x_bins.min() > 0
    assert profile.y_bins.min() > 0

    profile = yt.create_profile(
        ad,
        ["particle_position_x", "particle_position_y"],
        "particle_velocity_x",
        weight_field=None,
    )
    assert profile["particle_velocity_x"].min() < 0
    assert profile.x_bins.min() < 0
    assert profile.y_bins.min() < 0

    # can't use CIC deposition with log-scaled bin fields
    with assert_raises(RuntimeError):
        yt.create_profile(
            ad,
            ["particle_position_x", "particle_position_y"],
            "particle_velocity_x",
            logs={
                "particle_position_x": True,
                "particle_position_y": False,
                "particle_position_z": False,
            },
            weight_field=None,
            deposition="cic",
        )

    # can't use CIC deposition with accumulation or fractional
    with assert_raises(RuntimeError):
        yt.create_profile(
            ad,
            ["particle_position_x", "particle_position_y"],
            "particle_velocity_x",
            logs={
                "particle_position_x": False,
                "particle_position_y": False,
                "particle_position_z": False,
            },
            weight_field=None,
            deposition="cic",
            accumulation=True,
            fractional=True,
        )

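# Context for the two RuntimeError cases above (an explanatory note, not
# from the original test): CIC deposition splits each particle's contribution
# across neighboring bins assuming uniformly spaced bin edges, so it is
# rejected for log-scaled bin fields, and likewise when combined with
# accumulation or fractional profiles, where the redistributed weights would
# not compose consistently.
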