def test_GIVEN_nx_class_and_attributes_are_bytes_WHEN_output_to_json_THEN_they_are_written_as_utf8(
    file, ):
    dataset_name = "test_ds"
    dataset_value = 1
    dataset_dtype = np.int32

    dataset = file.create_dataset(dataset_name,
                                  data=dataset_value,
                                  dtype=dataset_dtype)
    test_nx_class = b"NXpinhole"
    test_string_attr = b"some_string"
    dataset.attrs["NX_class"] = test_nx_class
    dataset.attrs["string_attr"] = test_string_attr

    converter = NexusToDictConverter()
    root_dict = converter.convert(file)

    ds = root_dict["children"][0]

    for attribute in ds["attributes"]:
        assert attribute["name"] in ["NX_class", "string_attr"]
        if attribute["name"] == "NX_class":
            assert attribute["values"] == test_nx_class.decode("utf8")
        elif attribute["name"] == "string_attr":
            assert attribute["values"] == test_string_attr.decode("utf8")
def test_GIVEN_no_attributes_WHEN_adding_attributes_THEN_root_dict_is_not_changed(
        file):
    root_dict = dict()
    dataset = file.create_dataset("test", data=123)
    assert not dataset.attrs.keys()
    _add_attributes(dataset, root_dict)
    assert not root_dict
def test_UI_GIVEN_array_dataset_as_magnitude_WHEN_creating_translation_THEN_ui_is_filled_correctly(
    qtbot, file  # noqa:F811
):
    wrapper = NexusWrapper()
    instrument = Instrument(wrapper, {})

    component = instrument.create_component("test", "NXaperture", "")

    array = np.array([1, 2, 3, 4])

    x = 1
    y = 0
    z = 0
    transform = component.add_translation(QVector3D(x, y, z), name="test")

    transform.dataset = file.create_dataset("test", data=array)

    view = EditTranslation(parent=None, transformation=transform, instrument=instrument)
    qtbot.addWidget(view)

    assert view.transformation_frame.x_spinbox.value() == x
    assert view.transformation_frame.y_spinbox.value() == y
    assert view.transformation_frame.z_spinbox.value() == z
    assert np.allclose(view.transformation.dataset[...], array)
    assert (
        view.transformation_frame.magnitude_widget.field_type == FieldType.array_dataset
    )
def test_GIVEN_attribute_in_blacklist_WHEN_adding_attributes_THEN_attrs_is_blank(
        file):
    root_dict = dict()
    dataset_name = "test"
    dataset = file.create_dataset(dataset_name, data=123)
    attr_key = ATTR_NAME_BLACKLIST[0]
    attr_value = "some_value"
    dataset.attrs[attr_key] = attr_value
    _add_attributes(dataset, root_dict)
    assert not root_dict
def test_GIVEN_string_list_WHEN_getting_data_and_type_THEN_returns_correct_dtype(
        file):
    dataset_name = "ds"
    dataset_value = np.string_(["s", "t", "r"])

    dataset = file.create_dataset(dataset_name, data=dataset_value)

    data, dtype, size = get_data_and_type(dataset)

    assert data == [x.decode("ASCII") for x in list(dataset_value)]
    assert size == (len(dataset_value), )
def test_GIVEN_existing_field_with_attr_which_is_in_blacklist_WHEN_editing_component_THEN_attr_is_not_filled_in(
        qtbot, file, field_attributes_dialog):
    attr_key = "units"
    attr_val = "m"

    ds = file.create_dataset(name="test", data=123)
    ds.attrs[attr_key] = attr_val

    field_attributes_dialog.fill_existing_attrs(ds)

    assert len(field_attributes_dialog.get_attrs()) == 0
def test_GIVEN_existing_field_with_attr_WHEN_editing_component_THEN_both_field_and_attrs_are_filled_in_correctly(
        qtbot, file, attr_val, field_attributes_dialog):
    attr_key = "testattr"

    ds = file.create_dataset(name="test", data=123)
    ds.attrs[attr_key] = attr_val

    field_attributes_dialog.fill_existing_attrs(ds)

    assert len(field_attributes_dialog.get_attrs()) == 1
    assert field_attributes_dialog.get_attrs()[attr_key] == attr_val
def test_GIVEN_attribute_value_is_byte_string_WHEN_filling_existing_values_THEN_string_is_decoded_in_lineedit(
        qtbot, field_attributes_dialog, file):
    attribute_value_string = "yards"

    ds = file.create_dataset(name="test", data=123)
    ds.attrs["testattr"] = attribute_value_string.encode("utf-8")

    field_attributes_dialog.fill_existing_attrs(ds)
    assert (field_attributes_dialog.list_widget.itemWidget(
        field_attributes_dialog.list_widget.item(
            0)).attr_value_lineedit.text() == attribute_value_string)
def test_GIVEN_transformation_with_scalar_value_that_is_not_castable_to_int_WHEN_getting_ui_value_THEN_ui_placeholder_value_is_returned_instead(
        file,  # noqa: F811
):
    nexus_wrapper = NexusWrapper(str(uuid1()))
    transform_name = "transform_1"
    transform = create_transform(nexus_wrapper, transform_name)

    str_value = "sdfji"
    transform.dataset = file.create_dataset("test", data=str_value)

    assert transform.ui_value != str_value
    assert transform.ui_value == 0
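
# The two assertions above pin down a fallback: when the dataset backing a
# transformation holds a scalar that cannot be read as a number, ui_value returns
# the placeholder 0 instead of raising. An illustrative sketch of that behaviour
# (assumed, not the project's Transformation class):
class _TransformationValueSketch:
    def __init__(self, dataset):
        self.dataset = dataset

    @property
    def ui_value(self):
        try:
            return float(self.dataset[()])
        except (TypeError, ValueError):
            return 0  # UI placeholder when the stored value is not numeric
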
def test_GIVEN_attribute_WHEN_adding_attributes_THEN_attrs_are_added_to_root_dict(
        file):
    root_dict = dict()
    dataset_name = "test"
    dataset = file.create_dataset(dataset_name, data=123)
    attr_key = "something"
    attr_value = "some_value"
    dataset.attrs[attr_key] = attr_value
    _add_attributes(dataset, root_dict)
    assert root_dict["attributes"]
    assert root_dict["attributes"][0]["name"] == attr_key
    assert root_dict["attributes"][0]["values"] == attr_value
def test_GIVEN_array_WHEN_getting_data_and_dtype_THEN_function_returns_correct_fw_json_dtype_and_values(
    file, ):
    expected_dtype = "float"
    expected_values = [1.1, 1.2, 1.3]

    dataset = file.create_dataset("test_dataset",
                                  data=expected_values,
                                  dtype="float32")
    data, dtype, size = get_data_and_type(dataset)

    assert size == (len(expected_values), )
    assert np.allclose(data, expected_values)
    assert dtype == expected_dtype
def test_GIVEN_float_WHEN_getting_data_and_type_THEN_returns_correct_dtype(
        file):
    dataset_name = "ds"
    dataset_type = np.float32
    dataset_value = np.float32(2.123)

    dataset = file.create_dataset(dataset_name,
                                  dtype=dataset_type,
                                  data=dataset_value)
    data, dtype, size = get_data_and_type(dataset)

    assert data == dataset_value
    assert dtype == "float"
    assert size == 1
def test_GIVEN_single_string_WHEN_getting_data_and_dtype_THEN_function_returns_correct_fw_json_dtype(
    file, ):
    expected_dtype = "string"
    expected_size = 1
    expected_value = np.string_("udder")

    dataset = file.create_dataset("test_dataset",
                                  data=expected_value,
                                  dtype="S5")

    data, dtype, size = get_data_and_type(dataset)

    assert size == expected_size
    assert dtype == expected_dtype
    assert bytes(data, "ASCII") == expected_value
def test_GIVEN_int64_WHEN_getting_data_and_dtype_THEN_function_returns_correct_fw_json_dtype(
    file, ):
    expected_dtype = "int64"
    expected_size = 1
    expected_value = np.int64(171_798_691_842)  # bigger than max 32b int

    dataset = file.create_dataset("test_dataset",
                                  dtype="int64",
                                  data=expected_value)

    data, dtype, size = get_data_and_type(dataset)

    assert size == expected_size
    assert dtype == expected_dtype
    assert data == expected_value
def test_GIVEN_int32_WHEN_getting_data_and_dtype_THEN_function_returns_correct_fw_json_dtype(
    file, ):
    expected_dtype = "int32"
    expected_size = 1
    expected_value = np.int32(42)

    dataset = file.create_dataset("test_dataset",
                                  dtype="int32",
                                  data=expected_value)

    data, dtype, size = get_data_and_type(dataset)

    assert size == expected_size
    assert dtype == expected_dtype
    assert data == expected_value
def test_GIVEN_float64_WHEN_getting_data_and_dtype_THEN_function_returns_correct_fw_json_dtype(
    file, ):
    expected_dtype = "double"
    expected_size = 1
    expected_value = np.float64(324.123_231_413_515_223_412_352_135_34)

    dataset = file.create_dataset("test_dataset",
                                  dtype=np.float64,
                                  data=expected_value)

    data, dtype, size = get_data_and_type(dataset)

    assert size == expected_size
    assert dtype == expected_dtype
    assert data == expected_value
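
# The dtype tests above jointly describe the (data, dtype, size) contract of
# get_data_and_type: values come back with a filewriter-JSON type name (float32 ->
# "float", float64 -> "double", int32/int64 kept as-is, byte strings -> "string")
# and a size that is the shape tuple for arrays and 1 for scalars. A rough sketch of
# a function satisfying those assertions -- assumed, not the project's implementation:
def get_data_and_type_sketch(dataset):
    _fw_json_dtypes = {
        "float32": "float",
        "float64": "double",
        "int32": "int32",
        "int64": "int64",
    }
    size = dataset.shape if dataset.shape else 1  # shape tuple for arrays, 1 for scalars
    data = dataset[()]
    if dataset.dtype.kind == "S":  # fixed-length byte strings
        dtype = "string"
        if isinstance(data, bytes):
            data = data.decode("ASCII")
        else:
            data = [value.decode("ASCII") for value in data]
    else:
        dtype = _fw_json_dtypes[str(dataset.dtype)]
    return data, dtype, size
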
def test_GIVEN_dataset_with_an_array_attribute_WHEN_output_to_json_THEN_attribute_is_present_in_json(
        file, test_input):
    dataset_name = "test_ds"
    dataset_value = 1
    dataset_dtype = np.int32

    dataset = file.create_dataset(dataset_name,
                                  data=dataset_value,
                                  dtype=dataset_dtype)
    test_attr_name = "test_attr"
    dataset.attrs[test_attr_name] = test_input

    converter = NexusToDictConverter()
    root_dict = converter.convert(file)

    ds = root_dict["children"][0]

    assert ds["attributes"][0]["name"] == test_attr_name
    assert ds["attributes"][0]["values"].tolist() == test_input
def test_GIVEN_multiple_values_WHEN_handling_dataset_THEN_size_field_does_exist_in_root_dict(
    file, ):
    dataset_name = "test_ds"
    dataset_value = [1.1, 1.2, 1.3]
    dataset_dtype = np.float64

    dataset = file.create_dataset(dataset_name,
                                  data=dataset_value,
                                  dtype=dataset_dtype)
    dataset.attrs["NX_class"] = "NXpinhole"

    converter = NexusToDictConverter()
    root_dict = converter.convert(file)
    ds = root_dict["children"][0]

    assert ds["name"].lstrip("/") == dataset_name
    assert ds["type"] == "dataset"
    assert ds["values"] == dataset_value
    assert ds["dataset"]["size"] == (len(dataset_value), )