Esempio n. 1
0
def test_save_load_cycle():
    """Round-trip a blockfile through save/load and verify data & metadata."""
    sig_reload = None
    signal = hs.load(file2)
    serial = signal.original_metadata['blockfile_header']['Acquisition_time']
    date, time, timezone = serial_date_to_ISO_format(serial)
    general = signal.metadata.General
    nt.assert_equal(general.original_filename, 'test2.blo')
    nt.assert_equal(general.date, date)
    nt.assert_equal(general.time, time)
    nt.assert_equal(general.time_zone, timezone)
    nt.assert_equal(
        general.notes,
        "Precession angle : \r\nPrecession Frequency : \r\nCamera gamma : on")
    signal.save(save_path, overwrite=True)
    sig_reload = hs.load(save_path)
    np.testing.assert_equal(signal.data, sig_reload.data)
    nt.assert_equal(signal.axes_manager.as_dictionary(),
                    sig_reload.axes_manager.as_dictionary())
    nt.assert_equal(signal.original_metadata.as_dictionary(),
                    sig_reload.original_metadata.as_dictionary())
    # Align original_filename so the two metadata trees compare equal.
    sig_reload.metadata.General.original_filename = general.original_filename
    assert_deep_almost_equal(signal.metadata.as_dictionary(),
                             sig_reload.metadata.as_dictionary())
    reloaded_general = sig_reload.metadata.General
    nt.assert_equal(general.date, reloaded_general.date)
    nt.assert_equal(general.time, reloaded_general.time)
    nt.assert_is_instance(signal, hs.signals.Signal2D)
    # Drop the reference so the underlying memmap file is released.
    del sig_reload
    gc.collect()
    _remove_file(save_path)
Esempio n. 2
0
def test_save_load_cycle(save_path):
    """Round-trip FILE2 through ``save``/``load`` and compare the copies."""
    sig_reload = None
    signal = hs.load(FILE2)
    serial = signal.original_metadata['blockfile_header']['Acquisition_time']
    date, time, timezone = serial_date_to_ISO_format(serial)
    general = signal.metadata.General
    assert general.original_filename == 'test2.blo'
    assert (general.date, general.time, general.time_zone) == (
        date, time, timezone)
    assert (
        general.notes ==
        "Precession angle : \r\nPrecession Frequency : \r\nCamera gamma : on")
    signal.save(save_path, overwrite=True)
    sig_reload = hs.load(save_path)
    np.testing.assert_equal(signal.data, sig_reload.data)
    assert (signal.axes_manager.as_dictionary() ==
            sig_reload.axes_manager.as_dictionary())
    assert (signal.original_metadata.as_dictionary() ==
            sig_reload.original_metadata.as_dictionary())
    # Align original_filename so the two metadata trees compare equal.
    sig_reload.metadata.General.original_filename = general.original_filename
    assert_deep_almost_equal(signal.metadata.as_dictionary(),
                             sig_reload.metadata.as_dictionary())
    reloaded_general = sig_reload.metadata.General
    assert general.date == reloaded_general.date
    assert general.time == reloaded_general.time
    assert isinstance(signal, hs.signals.Signal2D)
    # Drop the reference so the memmap-backed file gets closed.
    del sig_reload
Esempio n. 3
0
def test_save_load_cycle(save_path, convert_units):
    """Round-trip FILE2 (optionally unit-converted) through save/load."""
    sig_reload = None
    signal = hs.load(FILE2, convert_units=convert_units)
    acq_serial = signal.original_metadata['blockfile_header'][
        'Acquisition_time']
    date, time, timezone = serial_date_to_ISO_format(acq_serial)
    meta_general = signal.metadata.General
    assert meta_general.original_filename == 'test2.blo'
    assert meta_general.date == date
    assert meta_general.time == time
    assert meta_general.time_zone == timezone
    assert (
        meta_general.notes ==
        "Precession angle : \r\nPrecession Frequency : \r\nCamera gamma : on")
    signal.save(save_path, overwrite=True)
    sig_reload = hs.load(save_path, convert_units=convert_units)
    np.testing.assert_equal(signal.data, sig_reload.data)
    assert (signal.axes_manager.as_dictionary() ==
            sig_reload.axes_manager.as_dictionary())
    assert (signal.original_metadata.as_dictionary() ==
            sig_reload.original_metadata.as_dictionary())
    # Align original_filename so the two metadata trees compare equal.
    sig_reload.metadata.General.original_filename = (
        meta_general.original_filename)
    assert_deep_almost_equal(signal.metadata.as_dictionary(),
                             sig_reload.metadata.as_dictionary())
    assert meta_general.date == sig_reload.metadata.General.date
    assert meta_general.time == sig_reload.metadata.General.time
    assert isinstance(signal, hs.signals.Signal2D)
    # Drop the reference so the memmap file is closed.
    del sig_reload
Esempio n. 4
0
def test_serial_date_to_ISO_format():
    """serial_date_to_ISO_format yields local (date, time, tzname) triples."""
    iso_1 = dtt.serial_date_to_ISO_format(serial1)
    local_1 = dt1.astimezone(tz.tzlocal())
    expected_1 = (local_1.date().isoformat(),
                  local_1.time().isoformat(),
                  local_1.tzname())
    assert tuple(iso_1) == expected_1

    iso_2 = dtt.serial_date_to_ISO_format(serial2)
    local_2 = dt2.astimezone(tz.tzlocal())
    assert iso_2[0] == local_2.date().isoformat()
    # Converting to serial date loses sub-second precision, so compare only
    # the HH:MM:SS prefix of the time string.
    assert iso_2[1][:8] == local_2.time().isoformat()
    assert iso_2[2] == local_2.tzname()

    iso_3 = dtt.serial_date_to_ISO_format(serial3)
    # dt3 is naive; interpret it as UTC before localizing.
    local_3 = dt3.replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal())
    expected_3 = (local_3.date().isoformat(),
                  local_3.time().isoformat(),
                  local_3.tzname())
    assert tuple(iso_3) == expected_3
Esempio n. 5
0
def test_serial_date_to_ISO_format():
    """Check serial-to-ISO conversion against dateutil's local rendering."""
    date_iso, time_iso, zone_name = dtt.serial_date_to_ISO_format(serial1)
    localized = dt1.astimezone(tz.tzlocal())
    assert date_iso == localized.date().isoformat()
    assert time_iso == localized.time().isoformat()
    assert zone_name == localized.tzname()

    date_iso, time_iso, zone_name = dtt.serial_date_to_ISO_format(serial2)
    localized = dt2.astimezone(tz.tzlocal())
    assert date_iso == localized.date().isoformat()
    # Serial dates lose milli/micro-second accuracy, so only the first
    # eight characters (HH:MM:SS) of the time are compared.
    assert time_iso[:8] == localized.time().isoformat()
    assert zone_name == localized.tzname()

    date_iso, time_iso, zone_name = dtt.serial_date_to_ISO_format(serial3)
    # dt3 is naive: attach UTC first, then shift into the local zone.
    localized = dt3.replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal())
    assert date_iso == localized.date().isoformat()
    assert time_iso == localized.time().isoformat()
    assert zone_name == localized.tzname()
Esempio n. 6
0
def test_save_load_cycle(save_path, convert_units):
    """Round-trip FILE2 through save/load and compare metadata, including
    the FileIO provenance records recorded on load/save."""
    sig_reload = None
    signal = hs.load(FILE2, convert_units=convert_units)
    serial = signal.original_metadata["blockfile_header"]["Acquisition_time"]
    date, time, timezone = serial_date_to_ISO_format(serial)
    general = signal.metadata.General
    assert general.original_filename == "test2.blo"
    assert general.date == date
    assert general.time == time
    assert general.time_zone == timezone
    assert (
        general.notes ==
        "Precession angle : \r\nPrecession Frequency : \r\nCamera gamma : on")
    signal.save(save_path, overwrite=True)
    sig_reload = hs.load(save_path, convert_units=convert_units)
    np.testing.assert_equal(signal.data, sig_reload.data)
    assert (signal.axes_manager.as_dictionary() ==
            sig_reload.axes_manager.as_dictionary())
    assert (signal.original_metadata.as_dictionary() ==
            sig_reload.original_metadata.as_dictionary())
    # Align original_filename so the two metadata trees compare equal.
    sig_reload.metadata.General.original_filename = (
        general.original_filename)
    # Verify the FileIO provenance entries, then delete them so the whole
    # metadata structure can be compared at once:
    plugin = 'hyperspy.io_plugins.blockfile'
    fileio_records = [
        (signal.metadata.General.FileIO.Number_0, 'load'),
        (signal.metadata.General.FileIO.Number_1, 'save'),
        (sig_reload.metadata.General.FileIO.Number_0, 'load'),
    ]
    for record, expected_operation in fileio_records:
        assert record.operation == expected_operation
        assert record.io_plugin == plugin
    del signal.metadata.General.FileIO
    del sig_reload.metadata.General.FileIO

    assert_deep_almost_equal(signal.metadata.as_dictionary(),
                             sig_reload.metadata.as_dictionary())
    assert general.date == sig_reload.metadata.General.date
    assert general.time == sig_reload.metadata.General.time
    assert isinstance(signal, hs.signals.Signal2D)
    # Drop the reference so the memmap file is closed.
    del sig_reload
Esempio n. 7
0
def file_reader(filename, endianess='<', mmap_mode='c',
                lazy=False, **kwds):
    """Read a NanoMegas blockfile (.blo) into a signal dictionary.

    Parameters
    ----------
    filename : str
        Path to the blockfile.
    endianess : str
        Byte-order prefix for numpy dtypes ('<' little-endian by default).
    mmap_mode : str
        numpy.memmap mode used for lazy loading. A writable mode ('+', or
        any 'write' mode other than 'copyonwrite') opens the file for
        in-place writing and is incompatible with ``lazy=True``.
    lazy : bool
        If True, memory-map the data instead of reading it into memory.

    Returns
    -------
    list of dict
        Single-element list holding the signal dictionary with keys
        'data', 'axes', 'metadata', 'original_metadata' and 'mapping'.

    Raises
    ------
    ValueError
        If a writable ``mmap_mode`` is combined with ``lazy=True``.
    """
    _logger.debug("Reading blockfile: %s" % filename)
    # Makes sure we open in right mode:
    if '+' in mmap_mode or ('write' in mmap_mode and
                            'copyonwrite' != mmap_mode):
        if lazy:
            raise ValueError("Lazy loading does not support in-place writing")
        f = open(filename, 'r+b')
    else:
        f = open(filename, 'rb')
    _logger.debug("File opened")

    # Close the handle even if parsing raises; np.memmap keeps its own
    # mapping alive, so closing here is safe for the lazy path too.
    try:
        # Get header
        header = np.fromfile(f, dtype=get_header_dtype_list(endianess),
                             count=1)
        if header['MAGIC'][0] not in magics:
            warnings.warn("Blockfile has unrecognized header signature. "
                          "Will attempt to read, but correctness not "
                          "guaranteed!")
        header = sarray2dict(header)
        # The gap between the fixed header and the first data block holds a
        # zero-padded free-text note.
        note = f.read(header['Data_offset_1'] - f.tell())
        note = note.strip(b'\x00')
        # NOTE(review): decode() assumes UTF-8; a note in another encoding
        # would raise UnicodeDecodeError here — confirm expected encoding.
        header['Note'] = note.decode()
        _logger.debug("File header: " + str(header))
        NX, NY = int(header['NX']), int(header['NY'])
        DP_SZ = int(header['DP_SZ'])
        # SDP of 0 means the diffraction scale is unknown.
        if header['SDP']:
            SDP = 100. / header['SDP']
        else:
            SDP = Undefined
        original_metadata = {'blockfile_header': header}

        # Get data:

        # A Virtual BF/DF is stored first (currently skipped):
        #    offset1 = header['Data_offset_1']
        #    f.seek(offset1)
        #    data_pre = np.array(f.read(NX*NY), dtype=endianess+'u1'
        #        ).squeeze().reshape((NX, NY), order='C').T

        # Then comes actual blockfile
        offset2 = header['Data_offset_2']
        if not lazy:
            f.seek(offset2)
            data = np.fromfile(f, dtype=endianess + 'u1')
        else:
            data = np.memmap(f, mode=mmap_mode, offset=offset2,
                             dtype=endianess + 'u1')
        try:
            data = data.reshape((NY, NX, DP_SZ * DP_SZ + 6))
        except ValueError:
            warnings.warn(
                'Blockfile header dimensions larger than file size! '
                'Will attempt to load by zero padding incomplete frames.')
            # Data is stored DP by DP:
            pw = [(0, NX * NY * (DP_SZ * DP_SZ + 6) - data.size)]
            data = np.pad(data, pw, mode='constant')
            data = data.reshape((NY, NX, DP_SZ * DP_SZ + 6))

        # Every frame is preceded by a 6 byte sequence (AA 55, and then a
        # 4 byte integer specifying frame number), which is stripped here.
        data = data[:, :, 6:]
        data = data.reshape((NY, NX, DP_SZ, DP_SZ), order='C').squeeze()
    finally:
        f.close()

    units = ['nm', 'nm', 'cm', 'cm']
    names = ['y', 'x', 'dy', 'dx']
    scales = [header['SY'], header['SX'], SDP, SDP]
    date, time, time_zone = serial_date_to_ISO_format(
        header['Acquisition_time'])
    metadata = {'General': {'original_filename': os.path.split(filename)[1],
                            'date': date,
                            'time': time,
                            'time_zone': time_zone,
                            'notes': header['Note']},
                "Signal": {'signal_type': "diffraction",
                           'record_by': 'image', },
                }
    # Create the axis objects for each axis
    dim = data.ndim
    axes = [
        {
            'size': data.shape[i],
            'index_in_array': i,
            'name': names[i],
            'scale': scales[i],
            'offset': 0.0,
            'units': units[i], }
        for i in range(dim)]

    dictionary = {'data': data,
                  'axes': axes,
                  'metadata': metadata,
                  'original_metadata': original_metadata,
                  'mapping': mapping, }

    return [dictionary, ]
Esempio n. 8
0
def file_reader(filename, endianess='<', mmap_mode=None,
                lazy=False, **kwds):
    """Read a NanoMegas blockfile (.blo) into a signal dictionary.

    Parameters
    ----------
    filename : str
        Path to the blockfile.
    endianess : str
        Byte-order prefix for numpy dtypes ('<' little-endian by default).
    mmap_mode : str or None
        numpy.memmap mode; defaults to 'r' when ``lazy`` else 'c'. A
        writable mode ('+', or any 'write' mode other than 'copyonwrite')
        opens the file for in-place writing and is incompatible with
        ``lazy=True``.
    lazy : bool
        If True, memory-map the data instead of reading it into memory.

    Returns
    -------
    list of dict
        Single-element list holding the signal dictionary with keys
        'data', 'axes', 'metadata', 'original_metadata' and 'mapping'.

    Raises
    ------
    ValueError
        If a writable ``mmap_mode`` is combined with ``lazy=True``.
    """
    _logger.debug("Reading blockfile: %s" % filename)
    if mmap_mode is None:
        mmap_mode = 'r' if lazy else 'c'
    # Makes sure we open in right mode:
    if '+' in mmap_mode or ('write' in mmap_mode and
                            'copyonwrite' != mmap_mode):
        if lazy:
            raise ValueError("Lazy loading does not support in-place writing")
        f = open(filename, 'r+b')
    else:
        f = open(filename, 'rb')
    _logger.debug("File opened")

    # Close the handle even if parsing raises; np.memmap keeps its own
    # mapping alive, so closing here is safe for the lazy path too.
    try:
        # Get header
        header = np.fromfile(f, dtype=get_header_dtype_list(endianess),
                             count=1)
        if header['MAGIC'][0] not in magics:
            warnings.warn("Blockfile has unrecognized header signature. "
                          "Will attempt to read, but correctness not "
                          "guaranteed!")
        header = sarray2dict(header)
        note = f.read(header['Data_offset_1'] - f.tell())
        # Default the Note so later metadata construction never hits a
        # missing key if decoding fails below.
        header['Note'] = ''
        # It seems it uses "\x00" for padding, so we remove it
        try:
            header['Note'] = note.decode("latin1").strip("\x00")
        except Exception:
            # Narrowed from BaseException so KeyboardInterrupt/SystemExit
            # still propagate. Not sure about the encoding so, if it
            # fails, we carry on.
            _logger.warning(
                "Reading the Note metadata of this file failed. "
                "You can help improving "
                "HyperSpy by reporting the issue in "
                "https://github.com/hyperspy/hyperspy")
        _logger.debug("File header: " + str(header))
        NX, NY = int(header['NX']), int(header['NY'])
        DP_SZ = int(header['DP_SZ'])
        # SDP of 0 means the diffraction scale is unknown.
        if header['SDP']:
            SDP = 100. / header['SDP']
        else:
            SDP = Undefined
        original_metadata = {'blockfile_header': header}

        # Get data:

        # A Virtual BF/DF is stored first (currently skipped):
        #    offset1 = header['Data_offset_1']
        #    f.seek(offset1)
        #    data_pre = np.array(f.read(NX*NY), dtype=endianess+'u1'
        #        ).squeeze().reshape((NX, NY), order='C').T

        # Then comes actual blockfile
        offset2 = header['Data_offset_2']
        if not lazy:
            f.seek(offset2)
            data = np.fromfile(f, dtype=endianess + 'u1')
        else:
            data = np.memmap(f, mode=mmap_mode, offset=offset2,
                             dtype=endianess + 'u1')
        try:
            data = data.reshape((NY, NX, DP_SZ * DP_SZ + 6))
        except ValueError:
            warnings.warn(
                'Blockfile header dimensions larger than file size! '
                'Will attempt to load by zero padding incomplete frames.')
            # Data is stored DP by DP:
            pw = [(0, NX * NY * (DP_SZ * DP_SZ + 6) - data.size)]
            data = np.pad(data, pw, mode='constant')
            data = data.reshape((NY, NX, DP_SZ * DP_SZ + 6))

        # Every frame is preceded by a 6 byte sequence (AA 55, and then a
        # 4 byte integer specifying frame number), which is stripped here.
        data = data[:, :, 6:]
        data = data.reshape((NY, NX, DP_SZ, DP_SZ), order='C').squeeze()
    finally:
        f.close()

    units = ['nm', 'nm', 'cm', 'cm']
    names = ['y', 'x', 'dy', 'dx']
    scales = [header['SY'], header['SX'], SDP, SDP]
    date, time, time_zone = serial_date_to_ISO_format(
        header['Acquisition_time'])
    metadata = {'General': {'original_filename': os.path.split(filename)[1],
                            'date': date,
                            'time': time,
                            'time_zone': time_zone,
                            'notes': header['Note']},
                "Signal": {'signal_type': "diffraction",
                           'record_by': 'image', },
                }
    # Create the axis objects for each axis
    dim = data.ndim
    axes = [
        {
            'size': data.shape[i],
            'index_in_array': i,
            'name': names[i],
            'scale': scales[i],
            'offset': 0.0,
            'units': units[i], }
        for i in range(dim)]

    dictionary = {'data': data,
                  'axes': axes,
                  'metadata': metadata,
                  'original_metadata': original_metadata,
                  'mapping': mapping, }

    return [dictionary, ]
Esempio n. 9
0
def file_reader(filename, endianess="<", mmap_mode=None, lazy=False, **kwds):
    """Read a NanoMegas blockfile (.blo) into a signal dictionary.

    Parameters
    ----------
    filename : str
        Path to the blockfile.
    endianess : str
        Byte-order prefix for numpy dtypes ("<" little-endian by default).
    mmap_mode : str or None
        numpy.memmap mode; defaults to "r" when ``lazy`` else "c". A
        writable mode ("+", or any "write" mode other than "copyonwrite")
        opens the file for in-place writing and is incompatible with
        ``lazy=True``.
    lazy : bool
        If True, memory-map the data instead of reading it into memory.

    Returns
    -------
    list of dict
        Single-element list holding the signal dictionary with keys
        "data", "axes", "metadata", "original_metadata" and "mapping".

    Raises
    ------
    ValueError
        If a writable ``mmap_mode`` is combined with ``lazy=True``.
    """
    _logger.debug("Reading blockfile: %s" % filename)
    if mmap_mode is None:
        mmap_mode = "r" if lazy else "c"
    # Makes sure we open in right mode:
    if "+" in mmap_mode or ("write" in mmap_mode
                            and "copyonwrite" != mmap_mode):
        if lazy:
            raise ValueError("Lazy loading does not support in-place writing")
        f = open(filename, "r+b")
    else:
        f = open(filename, "rb")
    _logger.debug("File opened")

    # Close the handle even if parsing raises; np.memmap keeps its own
    # mapping alive, so closing here is safe for the lazy path too.
    try:
        # Get header
        header = np.fromfile(f, dtype=get_header_dtype_list(endianess),
                             count=1)
        if header["MAGIC"][0] not in magics:
            warnings.warn(
                "Blockfile has unrecognized header signature. "
                "Will attempt to read, but correctness not guaranteed!",
                UserWarning,
            )
        header = sarray2dict(header)
        note = f.read(header["Data_offset_1"] - f.tell())
        # Default the Note so the metadata construction below never hits a
        # missing key if decoding fails.
        header["Note"] = ""
        # It seems it uses "\x00" for padding, so we remove it
        try:
            header["Note"] = note.decode("latin1").strip("\x00")
        except Exception:
            # Narrowed from BaseException so KeyboardInterrupt/SystemExit
            # still propagate. Not sure about the encoding so, if it
            # fails, we carry on.
            _logger.warning("Reading the Note metadata of this file failed. "
                            "You can help improving "
                            "HyperSpy by reporting the issue in "
                            "https://github.com/hyperspy/hyperspy")
        _logger.debug("File header: " + str(header))
        NX, NY = int(header["NX"]), int(header["NY"])
        DP_SZ = int(header["DP_SZ"])
        # SDP of 0 means the diffraction scale is unknown.
        if header["SDP"]:
            SDP = 100.0 / header["SDP"]
        else:
            SDP = Undefined
        original_metadata = {"blockfile_header": header}

        # Get data:

        # TODO A Virtual BF/DF is stored first, may be loaded as navigator
        # in future
        # offset1 = header['Data_offset_1']
        # f.seek(offset1)
        # navigator = np.fromfile(f, dtype=endianess+"u1", shape=(NX, NY)).T

        # Then comes actual blockfile
        offset2 = header["Data_offset_2"]
        if not lazy:
            f.seek(offset2)
            data = np.fromfile(f, dtype=endianess + "u1")
        else:
            data = np.memmap(f,
                             mode=mmap_mode,
                             offset=offset2,
                             dtype=endianess + "u1")
        try:
            data = data.reshape((NY, NX, DP_SZ * DP_SZ + 6))
        except ValueError:
            warnings.warn(
                "Blockfile header dimensions larger than file size! "
                "Will attempt to load by zero padding incomplete frames.")
            # Data is stored DP by DP:
            pw = [(0, NX * NY * (DP_SZ * DP_SZ + 6) - data.size)]
            data = np.pad(data, pw, mode="constant")
            data = data.reshape((NY, NX, DP_SZ * DP_SZ + 6))

        # Every frame is preceded by a 6 byte sequence (AA 55, and then a
        # 4 byte integer specifying frame number), which is stripped here.
        data = data[:, :, 6:]
        data = data.reshape((NY, NX, DP_SZ, DP_SZ), order="C").squeeze()
    finally:
        f.close()

    units = ["nm", "nm", "cm", "cm"]
    names = ["y", "x", "dy", "dx"]
    scales = [header["SY"], header["SX"], SDP, SDP]
    date, time, time_zone = serial_date_to_ISO_format(
        header["Acquisition_time"])
    metadata = {
        "General": {
            "original_filename": os.path.split(filename)[1],
            "date": date,
            "time": time,
            "time_zone": time_zone,
            "notes": header["Note"],
        },
        "Signal": {
            "signal_type": "diffraction",
            "record_by": "image",
        },
    }
    # Create the axis objects for each axis
    dim = data.ndim
    axes = [{
        "size": data.shape[i],
        "index_in_array": i,
        "name": names[i],
        "scale": scales[i],
        "offset": 0.0,
        "units": units[i],
    } for i in range(dim)]

    dictionary = {
        "data": data,
        "axes": axes,
        "metadata": metadata,
        "original_metadata": original_metadata,
        "mapping": mapping,
    }

    return [
        dictionary,
    ]