コード例 #1
0
    def test_read_head_folder_and_write_h5(self):
        """Round-trip a dummy Head through H5 storage.

        Writes the Head (recursively, with all referenced entities) to H5,
        then reloads it two different ways and verifies every array-valued
        field survives the round trip within tolerance.
        """

        def assert_arrays_close(a, b):
            # Same criterion as the original per-field checks:
            # max element-wise absolute difference under 1e-6.
            assert np.max(np.abs(a - b)) < 1e-6

        def assert_head_matches(other):
            # Compare each array-valued attribute of a reloaded head
            # against the original `head`.
            assert_arrays_close(head.connectivity.weights,
                                other.connectivity.weights)
            assert_arrays_close(head.connectivity.tract_lengths,
                                other.connectivity.tract_lengths)
            assert_arrays_close(head.surface.vertices, other.surface.vertices)
            assert_arrays_close(head.surface.triangles,
                                other.surface.triangles)
            assert_arrays_close(head.eeg_sensors.locations,
                                other.eeg_sensors.locations)
            assert_arrays_close(head.eeg_projection.projection_data,
                                other.eeg_projection.projection_data)

        head = self._prepare_dummy_head()
        path = self.writer.write_tvb_to_h5(head, recursive=True)

        # Reload via the tvb-scripts Head reader.
        assert_head_matches(Head.from_file(path))

        # The following will not work as long as Head references tvb-scripts classes instead of TVB ones
        from tvb.core.neocom import h5
        assert_head_matches(h5.load(path, with_references=True))
コード例 #2
0
Demo script on how to use tvb-framework default read/write capabilities

.. moduleauthor:: Lia Domide <*****@*****.**>
"""

from tvb.core.neocom import h5
from tvb.basic.profile import TvbProfile
from tvb.datatypes.connectivity import Connectivity
from tvb.adapters.datatypes.h5.connectivity_h5 import ConnectivityH5

if __name__ == '__main__':
    TvbProfile.set_profile(TvbProfile.COMMAND_PROFILE)

    # Load the default connectivity bundled with TVB and prepare it for use.
    connectivity = Connectivity.from_file()
    connectivity.configure()

    # Persist the HasTraits entity into the current directory.
    target_dir = "."
    h5.store_complete(connectivity, target_dir)

    # Rebuild the name of the file just written (it embeds the entity GUID)...
    stored_path = h5.path_for(target_dir, ConnectivityH5, connectivity.gid)

    # ...and load a HasTraits instance back from that file.
    restored = h5.load(stored_path)

    # Sanity-check that the round trip preserved the entity.
    assert connectivity.number_of_regions == 76
    assert connectivity.number_of_regions == restored.number_of_regions
コード例 #3
0
ファイル: neocom_test.py プロジェクト: PaulPawletta/tvb-root
def test_store_load(tmpdir, connectivity_factory):
    """Store a generated connectivity to H5 and load it back, then
    verify the weights matrix is preserved exactly."""
    target = os.path.join(str(tmpdir), 'interface.conn.h5')
    original = connectivity_factory(2)
    store(original, target)
    restored = load(target)
    numpy.testing.assert_equal(original.weights, restored.weights)
コード例 #4
0
        # Launch the Fourier analysis operation on the server for this project.
        operation_gid = tvb_client.launch_operation(project_gid,
                                                    algo_dto.module,
                                                    algo_dto.classname,
                                                    fourier_model)
        logger.info(
            "Fourier Analyzer operation has launched with gid {}".format(
                operation_gid))

        logger.info("Download the connectivity file...")
        # Retrieve the connectivity H5 file into the client's temp folder.
        connectivity_path = tvb_client.retrieve_datatype(
            connectivity_gid, tvb_client.temp_folder)
        logger.info(
            "The connectivity file location is: {}".format(connectivity_path))

        logger.info("Loading an entire Connectivity datatype in memory...")
        connectivity = h5.load(connectivity_path)
        logger.info("Info on current Connectivity: {}".format(
            connectivity.summary_info()))

        # NOTE(review): fixed typo in the log message ("chuck" -> "chunk").
        logger.info(
            "Loading a chunk from the time series H5 file, as this can be very large..."
        )
        # Read only the first 20 time points but the full extent of the
        # remaining three dimensions, avoiding loading the whole series.
        with TimeSeriesH5(time_series_path) as time_series_h5:
            data_shape = time_series_h5.read_data_shape()
            chunk = time_series_h5.read_data_slice(
                tuple([
                    slice(20),
                    slice(data_shape[1]),
                    slice(data_shape[2]),
                    slice(data_shape[3])
                ]))