Example #1
 def test_errors(self):
     """ some tests for specific errors """
     f = open("thisisafakehdf.h5", "w")  # wrong file type
     f.write("this is not an HDF5 file. sorry.")
     f.close()
     self.assertRaises(TypeError, NeoHdf5IO, filename="thisisafakehdf.h5")
     iom = NeoHdf5IO(filename=self.test_file)  # wrong object path test
     self.assertRaises(LookupError, iom.get, "/wrong_path")
     some_object = np.array([1, 2, 3])  # non NEO object test
     self.assertRaises(AssertionError, iom.save, some_object)
Example #2
    def test_relations(self):
        """ make sure the change in relationships is saved properly in the file,
        including correct M2M, no redundancy etc. RC -> RCG not tested."""
        def assert_children(self, obj, replica):
            obj_type = name_by_class[obj]
            self.assertEqual(
                md5(str(obj)).hexdigest(),
                md5(str(replica)).hexdigest())
            if obj_type in one_to_many_relationship:
                # copy the list so the += below does not mutate the shared dict
                rels = list(one_to_many_relationship[obj_type])
                if obj_type == "RecordingChannelGroup":
                    rels += many_to_many_relationship[obj_type]
                for child_type in rels:
                    ch1 = getattr(obj, child_type.lower() + "s")
                    ch2 = getattr(replica, child_type.lower() + "s")
                    self.assertEqual(len(ch1), len(ch2))
                    for i, v in enumerate(ch1):
                        self.assert_children(ch1[i], ch2[i])

        iom = NeoHdf5IO(filename=self.test_file)
        for obj_type in class_by_name.keys():
            obj = fake_NEO(obj_type, cascade=True)
            iom.save(obj)
            self.assertTrue(hasattr(obj, "hdf5_path"))
            replica = iom.get(obj.hdf5_path, cascade=True)
            self.assert_children(obj, replica)
Example #3
 def test_property_change(self):
     """ Make sure all attributes are saved properly after the change,
     including quantities, units, types etc."""
     iom = NeoHdf5IO(filename=self.test_file)
     for obj_type in class_by_name.keys():
         obj = fake_NEO(obj_type, cascade=False)
         iom.save(obj)
         self.assertTrue(hasattr(obj, "hdf5_path"))
         replica = iom.get(obj.hdf5_path, cascade=False)
         assert_objects_equivalent(obj, replica)
Example #4
 def __init__(self):
     # Initialize for loading the spike train data from storage
     collab_path = '/3653'
     client = get_bbp_client().document
     # Load NEST data using NeoHdf5IO
     store_path = "./local"
     client.download_file(collab_path + '/' + "spikes_L6I_spinnaker.h5",
                          store_path + "spikes_L6I_spinnaker.h5")
     data = NeoHdf5IO(store_path + "spikes_L6I_spinnaker.h5")
     self.spiketrains = data.read_block().list_children_by_class(SpikeTrain)
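
Example #4 only needs the remote storage client (get_bbp_client) to download the file; the reading pattern itself works on any local file written by NeoHdf5IO. A minimal sketch, assuming a hypothetical local file "spikes_local.h5":

from neo.core import SpikeTrain
from neo.io.hdf5io import NeoHdf5IO

# hypothetical local file previously written with NeoHdf5IO
data = NeoHdf5IO("spikes_local.h5")
block = data.read_block()
# collect every SpikeTrain contained anywhere below the block
spiketrains = block.list_children_by_class(SpikeTrain)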
Example #5
    def test_store_empty_spike_train(self):
        spiketrain0 = SpikeTrain([], t_start=0.0, t_stop=100.0, units="ms")
        spiketrain1 = SpikeTrain([23.4, 45.6, 67.8],
                                 t_start=0.0, t_stop=100.0, units="ms")
        segment = Segment(name="a_segment")
        segment.spiketrains.append(spiketrain0)
        segment.spiketrains.append(spiketrain1)
        block = Block(name="a_block")
        block.segments.append(segment)
        iom = NeoHdf5IO(filename="test987.h5")
        iom.save(block)
        iom.close()

        iom = NeoHdf5IO(filename="test987.h5")
        block1 = iom.get("/Block_0")
        self.assertEqual(block1.segments[0].spiketrains[0].t_stop, 100.0)
        self.assertEqual(len(block1.segments[0].spiketrains[0]), 0)
        self.assertEqual(len(block1.segments[0].spiketrains[1]), 3)
        iom.close()
        os.remove("test987.h5")
Example #6
 def test_create(self):
     """
     Create test file with signals, segments, blocks etc.
     """
     iom = NeoHdf5IO(filename=self.test_file)
     b1 = fake_NEO()  # creating a structure
     iom.save(b1)  # saving
     self.assertTrue(hasattr(b1,
                             "hdf5_path"))  # must be assigned after save
     iom.close()
     iom.connect(filename=self.test_file)
     b2 = iom.get(b1.hdf5_path)  # new object
     assert_neo_object_is_compliant(b2)
     assert_same_sub_schema(b1, b2)
Example #7
 def test_attr_changes(self):
     """ gets an object, changes its attributes, saves it, then compares how
     good the changes were saved. """
     iom = NeoHdf5IO(filename=self.test_file)
     for obj_type in objectnames:
         obj = fake_neo(obj_type=obj_type, cascade=False)
         iom.save(obj)
         orig_obj = iom.get(obj.hdf5_path)
         for attr in obj._all_attrs:
             if hasattr(orig_obj, attr[0]):
                 setattr(obj, attr[0], get_fake_value(*attr))
         iom.save(orig_obj)
         test_obj = iom.get(orig_obj.hdf5_path)
         assert_objects_equivalent(orig_obj, test_obj)
Example #8
 def test_attr_changes(self):
     """ gets an object, changes its attributes, saves it, then compares how
     good the changes were saved. """
     iom = NeoHdf5IO(filename=self.test_file)
     for obj_type in class_by_name.keys():
         obj = fake_NEO(obj_type=obj_type, cascade=False)
         iom.save(obj)
         orig_obj = iom.get(obj.hdf5_path)
         attrs = classes_necessary_attributes[
             obj_type] + classes_recommended_attributes[obj_type]
         for attr in attrs:
             if hasattr(orig_obj, attr[0]):
                 setattr(obj, attr[0], get_fake_value(attr))
         iom.save(orig_obj)
         test_obj = iom.get(orig_obj.hdf5_path)
         assert_objects_equivalent(orig_obj, test_obj)
Example #9
    def load(self):
        # load the data
        iom = NeoHdf5IO(filename=self.parameters.root_directory + "/datastore.hdf5")
        self.block = iom.get("/block_0")

        # re-wrap segments
        new = []
        for s in self.block.segments:
            new.append(MozaikSegment(s))

        self.block.segments = new

        # now just construct the stimulus dictionary
        for s in self.block.segments:
            self.stimulus_dict[s.stimulus] = True

        self.analysis_results = load_pickle_crosscompat(self.parameters.root_directory + "/datastore.analysis.pickle")
Example #10
    def save(self):
        # we need to first unwrap segments from MozaikWrapper
        old = self.block.segments[:]
        self.block.segments = []
        for s in old:
            self.block.segments.append(s.original_segment)

        # save the recording itself
        iom = NeoHdf5IO(filename=self.parameters.root_directory + "/datastore.hdf5")
        iom.write_block(self.block)

        # put back wrapped segments
        self.block.segments = old

        f = open(self.parameters.root_directory + "/datastore.analysis.pickle", "wb")
        # pickle.dump(self.analysis_results, f)
        cPickle.dump(self.analysis_results, f)
        f.close()
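
Examples #9 and #10 are a matching pair from the same Mozaik datastore class: save() temporarily unwraps the MozaikSegment wrappers and writes the block, while load() reads the block back and re-wraps its segments. The underlying Neo round trip can be sketched on its own using the save()/get() API shown in the other examples (the file name below is hypothetical):

from neo.core import Block, Segment
from neo.io.hdf5io import NeoHdf5IO

# hypothetical file; the Mozaik code derives the real path from parameters.root_directory
iom = NeoHdf5IO(filename="datastore_example.hdf5")
block = Block(name="recording")
block.segments.append(Segment(name="trial_0"))
iom.save(block)                                    # assigns block.hdf5_path
restored = iom.get(block.hdf5_path, cascade=True)  # read the block back
iom.close()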
Example #11
 def test_relations(self):
     """
     make sure the change in relationships is saved properly in the file,
     including correct M2M, no redundancy etc. RC -> RCG not tested.
     """
     def assert_children(self, obj, replica):
         obj_type = obj.__class__.__name__
         self.assertEqual(md5(str(obj)).hexdigest(),
                          md5(str(replica)).hexdigest())
         for container in getattr(obj, '_child_containers', []):
             ch1 = getattr(obj, container)
             ch2 = getattr(replica, container)
             self.assertEqual(len(ch1), len(ch2))
             for i, v in enumerate(ch1):
                 self.assert_children(ch1[i], ch2[i])
     iom = NeoHdf5IO(filename=self.test_file)
     for obj_type in objectnames:
         obj = fake_neo(obj_type, cascade=True)
         iom.save(obj)
         self.assertTrue(hasattr(obj, "hdf5_path"))
         replica = iom.get(obj.hdf5_path, cascade=True)
         self.assert_children(obj, replica)
Example #12
        for key in pos_group:
            spot_group = pos_group[key]
            times = spot_group["timestamps"]
            coords = spot_group["data"]
            irr_signal = IrregularlySampledSignal(
                name=pos_group[key].name,
                signal=coords.data,
                times=times.data,
                units=coords.attrs["unit"],
                time_units=times.attrs["unit"])
            irr_signals.append(irr_signal)
        return irr_signals


if __name__ == "__main__":
    import sys
    testfile = "/tmp/test.exdir"
    io = ExdirIO(testfile)

    block = io.read_block()

    from neo.io.hdf5io import NeoHdf5IO

    testfile = "/tmp/test_exdir_to_neo.h5"
    try:
        os.remove(testfile)
    except OSError:
        pass
    hdf5io = NeoHdf5IO(testfile)
    hdf5io.write(block)
Example #13
    def test_read_with_merge(self):
        test_file = get_test_file_full_path(self.ioclass,
                                            filename=self.files_to_test[0],
                                            directory=self.local_test_dir,
                                            clean=False)
        io = NeoHdf5IO(test_file)
        blocks = io.read_all_blocks(merge_singles=True)

        # general tests, true for both blocks
        for block in blocks:
            for segment in block.segments:
                self.assertEqual(segment.block, block)

        # tests of Block #1, which is constructed from "array" (multi-channel)
        # objects, so should be straightforward to convert to the version 0.5 API
        block0 = blocks[0]
        self.assertEqual(block0.name, "block1")
        self.assertEqual(block0.index, 1234)
        self.assertEqual(block0.annotations["foo"], "bar")
        self.assertEqual(len(block0.segments), 3)
        for segment in block0.segments:

            self.assertEqual(len(segment.analogsignals), 2)
            as0 = segment.analogsignals[0]
            self.assertEqual(as0.shape, (1000, 4))
            self.assertEqual(as0.sampling_rate, 1 * kHz)
            self.assertEqual(as0.units, mV)
            self.assertEqual(as0.segment, segment)

            self.assertEqual(len(segment.spiketrains), 4)
            st = segment.spiketrains[-1]
            self.assertEqual(st.units, ms)
            self.assertEqual(st.t_stop, 1000 * ms)
            self.assertEqual(st.t_start, 0 * ms)
            self.assertEqual(st.segment, segment)

            self.assertEqual(len(segment.events), 1)
            ev = segment.events[0]
            assert_array_equal(ev.labels, np.array(['trig0', 'trig1',
                                                    'trig2']))
            self.assertEqual(ev.units, second)
            assert_array_equal(ev.magnitude, np.arange(0, 30, 10))
            self.assertEqual(ev.segment, segment)

            self.assertEqual(len(segment.epochs), 1)
            ep = segment.epochs[0]
            assert_array_equal(ep.labels, np.array(['btn0', 'btn1', 'btn2']))
            assert_array_equal(ep.durations.magnitude, np.array([10, 5, 7]))
            self.assertEqual(ep.units, second)
            assert_array_equal(ep.magnitude, np.arange(0, 30, 10))
            self.assertEqual(ep.segment, segment)

            self.assertEqual(len(segment.irregularlysampledsignals), 2)
            iss0 = segment.irregularlysampledsignals[0]
            self.assertEqual(iss0.shape, (3, 2))
            assert_array_equal(iss0.times, [0.01, 0.03, 0.12] * second)
            assert_array_equal(iss0.magnitude,
                               np.array([[4, 3], [5, 4], [6, 3]]))
            self.assertEqual(iss0.units, nA)
            self.assertEqual(iss0.segment, segment)

            iss1 = segment.irregularlysampledsignals[1]
            self.assertEqual(iss1.shape, (3, 1))
            assert_array_equal(iss1.times, [0.02, 0.05, 0.15] * second)
            self.assertEqual(iss1.units, nA)
            assert_array_equal(iss1.magnitude, np.array([[3], [4], [3]]))

        # tests of Block #2, which is constructed from "singleton"
        # (single-channel) objects, so is potentially tricky to convert to the
        # version 0.5 API
        block1 = blocks[1]
        self.assertEqual(block1.name, "block2")

        for segment in block1.segments:
            self.assertEqual(len(segment.analogsignals), 2)
            as0 = segment.analogsignals[0]
            self.assertEqual(as0.shape, (1000, 4))
            self.assertEqual(as0.sampling_rate, 1 * kHz)
            self.assertEqual(as0.units, mV)
            self.assertEqual(as0.segment, segment)

            self.assertEqual(len(segment.spiketrains), 7)
            st = segment.spiketrains[-1]
            self.assertEqual(st.units, ms)
            self.assertEqual(st.t_stop, 1000 * ms)
            self.assertEqual(st.t_start, 0 * ms)
            self.assertEqual(st.segment, segment)

            self.assertEqual(len(segment.events), 0)
            self.assertEqual(len(segment.epochs), 0)

        self.assertEqual(len(block1.channel_indexes), 3)

        ci0 = block1.channel_indexes[0]
        self.assertEqual(ci0.name, "electrode1")
        self.assertEqual(len(ci0.analogsignals), 1)
        as00 = ci0.analogsignals[0]
        self.assertEqual(as00.segment, segment)
        self.assertEqual(as00.shape, (1000, 4))
        self.assertEqual(id(as00), id(segment.analogsignals[0]))
        self.assertEqual(as00.mean(), segment.analogsignals[0].mean())
        self.assertEqual(as00.channel_index, ci0)
        assert_array_equal(ci0.index, np.array([0, 1, 2, 3]))
        assert_array_equal(ci0.channel_ids, np.array([0, 1, 2, 3]))
        self.assertEqual(len(ci0.units), 2)
        self.assertEqual(len(ci0.units[0].spiketrains), 2)
        self.assertEqual(id(ci0.units[0].spiketrains[0]),
                         id(block1.segments[0].spiketrains[0]))
        self.assertEqual(id(ci0.units[0].spiketrains[1]),
                         id(block1.segments[1].spiketrains[0]))
        self.assertEqual(id(ci0.units[1].spiketrains[0]),
                         id(block1.segments[0].spiketrains[1]))

        ci1 = block1.channel_indexes[1]
        self.assertEqual(ci1.name, "electrode2")
        self.assertEqual(len(ci1.analogsignals), 1)
        as10 = ci1.analogsignals[0]
        self.assertEqual(as10.segment, segment)
        self.assertEqual(as10.shape, (1000, 4))
        self.assertEqual(id(as10), id(segment.analogsignals[1]))
        self.assertEqual(as10.mean(), segment.analogsignals[1].mean())
        self.assertEqual(as10.channel_index, ci1)
        assert_array_equal(ci1.index, np.array([0, 1, 2, 3]))
        assert_array_equal(ci1.channel_ids, np.array([4, 5, 6, 7]))
        self.assertEqual(len(ci1.units), 5)
        self.assertEqual(id(ci1.units[0].spiketrains[0]),
                         id(block1.segments[0].spiketrains[2]))
        self.assertEqual(id(ci1.units[3].spiketrains[1]),
                         id(block1.segments[1].spiketrains[5]))

        ci2 = block1.channel_indexes[2]
        self.assertEqual(ci2.name, "my_favourite_channels")
        self.assertEqual(len(ci2.analogsignals), 1)
        self.assertEqual(id(ci2.analogsignals[0]), id(as00))
        assert_array_equal(ci2.index, np.array([1, 3]))
        assert_array_equal(ci2.channel_ids, np.array([1, 3]))
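
Example #13 exercises read_all_blocks(merge_singles=True): according to the comments in the test, merging folds the "singleton" single-channel objects written by older Neo code into the multi-channel objects of the 0.5 API, while "array"-style blocks convert directly. A minimal usage sketch with a hypothetical legacy file:

from neo.io.hdf5io import NeoHdf5IO

# hypothetical file written by an older, write-capable NeoHdf5IO
io = NeoHdf5IO("legacy_recording.h5")
blocks = io.read_all_blocks(merge_singles=True)
for block in blocks:
    for segment in block.segments:
        print(segment.name, len(segment.analogsignals), len(segment.spiketrains))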
Example #14
                    # TODO what if read_analogsignal is called twice? The channel_index list should be cleared at some point
                    channel_index = self._channel_to_channel_index[
                        eeg_original_channel_id]
                    channel_index.analogsignals.append(analog_signal)

                analog_signals.append(analog_signal)

        return analog_signals


if __name__ == "__main__":
    import sys
    # import quantities
    io = AxonaIO(sys.argv[1])
    # io.read_analogsignal()
    # io.read_spiketrain()
    # io.read_spiketrainlist()
    # io.read_tracking()
    block = io.read_block()

    from neo.io.hdf5io import NeoHdf5IO

    testfile = "/tmp/test.h5"
    try:
        os.remove("/tmp/test.h5")
    except OSError:
        pass
    hdf5io = NeoHdf5IO("/tmp/test.h5")
    hdf5io.write(block)
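
The TODO at the top of Example #14 notes that calling read_analogsignal() twice would keep appending to the same channel_index.analogsignals lists. One possible remedy, sketched here as a hypothetical helper on the same IO class and assuming the _channel_to_channel_index mapping from the example, is to clear those lists before re-reading:

def _reset_channel_indexes(self):
    # hypothetical helper: drop previously appended signals so a second call
    # to read_analogsignal() does not accumulate duplicates
    for channel_index in self._channel_to_channel_index.values():
        del channel_index.analogsignals[:]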