def test__time_slice(self):
    time_slice = [.5, 5.6] * pq.s

    epoch2 = Epoch([0.6, 9.5, 16.8, 34.1] * pq.s,
                   durations=[4.5, 4.8, 5.0, 5.0] * pq.s,
                   t_start=.1 * pq.s)
    epoch2.annotate(epoch_type='b')
    epoch2.array_annotate(trial_id=[1, 2, 3, 4])

    event = Event(times=[0.5, 10.0, 25.2] * pq.s, t_start=.1 * pq.s)
    event.annotate(event_type='trial start')
    event.array_annotate(trial_id=[1, 2, 3])

    anasig = AnalogSignal(np.arange(50.0) * pq.mV, t_start=.1 * pq.s,
                          sampling_rate=1.0 * pq.Hz)
    irrsig = IrregularlySampledSignal(signal=np.arange(50.0) * pq.mV,
                                      times=anasig.times,
                                      t_start=.1 * pq.s)
    st = SpikeTrain(np.arange(0.5, 50, 7) * pq.s,
                    t_start=.1 * pq.s, t_stop=50.0 * pq.s,
                    waveforms=np.array([[[0., 1.], [0.1, 1.1]],
                                        [[2., 3.], [2.1, 3.1]],
                                        [[4., 5.], [4.1, 5.1]],
                                        [[6., 7.], [6.1, 7.1]],
                                        [[8., 9.], [8.1, 9.1]],
                                        [[12., 13.], [12.1, 13.1]],
                                        [[14., 15.], [14.1, 15.1]],
                                        [[16., 17.], [16.1, 17.1]]]) * pq.mV,
                    array_annotations={'spikenum': np.arange(1, 9)})

    seg = Segment()
    seg.epochs = [epoch2]
    seg.events = [event]
    seg.analogsignals = [anasig]
    seg.irregularlysampledsignals = [irrsig]
    seg.spiketrains = [st]

    block = Block()
    block.segments = [seg]
    block.create_many_to_one_relationship()

    # test without resetting the time
    sliced = seg.time_slice(time_slice[0], time_slice[1])
    assert_neo_object_is_compliant(sliced)

    self.assertEqual(len(sliced.events), 1)
    self.assertEqual(len(sliced.spiketrains), 1)
    self.assertEqual(len(sliced.analogsignals), 1)
    self.assertEqual(len(sliced.irregularlysampledsignals), 1)
    self.assertEqual(len(sliced.epochs), 1)

    assert_same_attributes(sliced.spiketrains[0],
                           st.time_slice(t_start=time_slice[0],
                                         t_stop=time_slice[1]))
    assert_same_attributes(sliced.analogsignals[0],
                           anasig.time_slice(t_start=time_slice[0],
                                             t_stop=time_slice[1]))
    assert_same_attributes(sliced.irregularlysampledsignals[0],
                           irrsig.time_slice(t_start=time_slice[0],
                                             t_stop=time_slice[1]))
    assert_same_attributes(sliced.events[0],
                           event.time_slice(t_start=time_slice[0],
                                            t_stop=time_slice[1]))
    assert_same_attributes(sliced.epochs[0],
                           epoch2.time_slice(t_start=time_slice[0],
                                             t_stop=time_slice[1]))

    seg = Segment()
    seg.epochs = [epoch2]
    seg.events = [event]
    seg.analogsignals = [anasig]
    seg.irregularlysampledsignals = [irrsig]
    seg.spiketrains = [st]

    block = Block()
    block.segments = [seg]
    block.create_many_to_one_relationship()

    # test with resetting the time
    sliced = seg.time_slice(time_slice[0], time_slice[1], reset_time=True)
    assert_neo_object_is_compliant(sliced)

    self.assertEqual(len(sliced.events), 1)
    self.assertEqual(len(sliced.spiketrains), 1)
    self.assertEqual(len(sliced.analogsignals), 1)
    self.assertEqual(len(sliced.irregularlysampledsignals), 1)
    self.assertEqual(len(sliced.epochs), 1)

    assert_same_attributes(sliced.spiketrains[0],
                           st.time_shift(-time_slice[0]).time_slice(
                               t_start=0 * pq.s,
                               t_stop=time_slice[1] - time_slice[0]))

    anasig_target = anasig.copy()
    anasig_target = anasig_target.time_shift(-time_slice[0]).time_slice(
        t_start=0 * pq.s, t_stop=time_slice[1] - time_slice[0])
    assert_same_attributes(sliced.analogsignals[0], anasig_target)

    irrsig_target = irrsig.copy()
    irrsig_target = irrsig_target.time_shift(-time_slice[0]).time_slice(
        t_start=0 * pq.s, t_stop=time_slice[1] - time_slice[0])
    assert_same_attributes(sliced.irregularlysampledsignals[0], irrsig_target)

    assert_same_attributes(sliced.events[0],
                           event.time_shift(-time_slice[0]).time_slice(
                               t_start=0 * pq.s,
                               t_stop=time_slice[1] - time_slice[0]))
    assert_same_attributes(sliced.epochs[0],
                           epoch2.time_shift(-time_slice[0]).time_slice(
                               t_start=0 * pq.s,
                               t_stop=time_slice[1] - time_slice[0]))

    seg = Segment()

    reader = ExampleRawIO(filename='my_filename.fake')
    reader.parse_header()

    proxy_anasig = AnalogSignalProxy(rawio=reader, stream_index=0,
                                     inner_stream_channels=None,
                                     block_index=0, seg_index=0)
    seg.analogsignals.append(proxy_anasig)

    proxy_st = SpikeTrainProxy(rawio=reader, spike_channel_index=0,
                               block_index=0, seg_index=0)
    seg.spiketrains.append(proxy_st)

    proxy_event = EventProxy(rawio=reader, event_channel_index=0,
                             block_index=0, seg_index=0)
    seg.events.append(proxy_event)

    proxy_epoch = EpochProxy(rawio=reader, event_channel_index=1,
                             block_index=0, seg_index=0)
    proxy_epoch.annotate(pick='me')
    seg.epochs.append(proxy_epoch)

    loaded_epoch = proxy_epoch.load()
    loaded_event = proxy_event.load()
    loaded_st = proxy_st.load()
    loaded_anasig = proxy_anasig.load()

    block = Block()
    block.segments = [seg]
    block.create_many_to_one_relationship()

    # test with proxy objects
    sliced = seg.time_slice(time_slice[0], time_slice[1])
    assert_neo_object_is_compliant(sliced)

    sliced_event = loaded_event.time_slice(t_start=time_slice[0],
                                           t_stop=time_slice[1])
    has_event = len(sliced_event) > 0

    sliced_anasig = loaded_anasig.time_slice(t_start=time_slice[0],
                                             t_stop=time_slice[1])
    sliced_st = loaded_st.time_slice(t_start=time_slice[0],
                                     t_stop=time_slice[1])

    self.assertEqual(len(sliced.events), int(has_event))
    self.assertEqual(len(sliced.spiketrains), 1)
    self.assertEqual(len(sliced.analogsignals), 1)

    self.assertTrue(isinstance(sliced.spiketrains[0], SpikeTrain))
    assert_same_attributes(sliced.spiketrains[0], sliced_st)

    self.assertTrue(isinstance(sliced.analogsignals[0], AnalogSignal))
    assert_same_attributes(sliced.analogsignals[0], sliced_anasig)

    if has_event:
        self.assertTrue(isinstance(sliced.events[0], Event))
        assert_same_attributes(sliced.events[0], sliced_event)
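
# For context, a minimal standalone sketch of the Segment.time_slice behaviour
# exercised by the test above. This is an illustrative sketch only, not part of
# the test suite; it assumes neo, numpy and quantities are importable, and the
# variable names are hypothetical.

import numpy as np
import quantities as pq
from neo.core import AnalogSignal, Segment

demo_seg = Segment()
demo_seg.analogsignals.append(
    AnalogSignal(np.arange(10.0) * pq.mV,
                 t_start=0.0 * pq.s,
                 sampling_rate=1.0 * pq.Hz))

# Keep only the data between 2 s and 5 s. With reset_time=True the sliced
# segment's children would instead be shifted to start at t = 0 s, which is
# what the second half of the test above verifies.
demo_slice = demo_seg.time_slice(2.0 * pq.s, 5.0 * pq.s)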
def merge_datastores(
    datastores,
    root_directory,
    merge_recordings=True,
    merge_analysis=True,
    merge_stimuli=True,
    replace=False,
):
    """
    This function takes a tuple of datastores as input and merges them into a
    single datastore, which is saved in root_directory. The type of data to be
    merged can be controlled through the merge_recordings, merge_analysis and
    merge_stimuli booleans. It returns this datastore as a DataStore object.
    """
    merged_datastore = PickledDataStore(
        load=False,
        parameters=ParameterSet({
            "root_directory": root_directory,
            "store_stimuli": merge_stimuli
        }),
        replace=replace,
    )

    # Here we check if sheets and neurons are the same in all datastores
    assert compare_sheets_datastores(
        datastores), "All datastores should contain the same sheets"
    assert compare_neurons_ids_datastores(
        datastores), "Neurons in the datastores should have the same ids"
    assert compare_neurons_position_datastores(
        datastores), "Neurons in the datastores should have the same position"
    assert compare_neurons_annotations_datastores(
        datastores), "Neurons in the datastores should have the same annotations"

    if not os.path.isdir(root_directory):
        os.makedirs(root_directory)

    # Change the block annotations so that the merged datastore gets the merged
    # version of the experiment parameters
    merged_datastore.block.annotations = datastores[0].block.annotations
    merged_datastore.block.annotations["experiment_parameters"] = \
        merge_experiment_parameters_datastores(datastores)

    j = 0
    for datastore in datastores:
        # Merge the recordings of all the datastores if this flag is set to true
        if merge_recordings:
            segments = datastore.get_segments()
            segments += datastore.get_segments(null=True)
            for seg in segments:
                for s in merged_datastore.get_segments():
                    if seg.annotations == s.annotations and seg.null == s.null:
                        print("Warning: A segment with the same parametrization "
                              "was already added to the datastore: %s"
                              % (seg.annotations))
                        raise ValueError(
                            "A segment with the same parametrization was already "
                            "added to the datastore. Currently uniqueness is "
                            "required. The user should check what caused this and "
                            "modify their simulations to avoid it!: %s \n %s"
                            % (str(seg.annotations), str(s.annotations)))

                # Load the full segment and add it to the merged datastore
                if not seg.full:
                    seg.load_full()
                merged_datastore.block.segments.append(
                    PickledDataStoreNeoWrapper(seg,
                                               "Segment" + str(j),
                                               root_directory,
                                               null=seg.null))
                merged_datastore.stimulus_dict[seg.annotations["stimulus"]] = True

                # Create a new pickle file for this mozaik segment and store a
                # corresponding neo segment there
                with open(root_directory + "/" + "Segment" + str(j) + ".pickle",
                          "wb") as f:
                    s = Segment(description=seg.description,
                                file_origin=seg.file_origin,
                                file_datetime=seg.file_datetime,
                                rec_datetime=seg.rec_datetime,
                                index=seg.index,
                                **seg.annotations)
                    s.spiketrains = seg.spiketrains
                    s.analogsignals = seg.analogsignals
                    pickle.dump(s, f)

                # Release each segment once it has been added to the merged
                # datastore to save memory
                seg.release()
                j = j + 1

        # Merge the analysis results of all the datastores if this flag is set to true
        if merge_analysis:
            adss = datastore.get_analysis_result()
            for ads in adss:
                merged_datastore.add_analysis_result(ads)

        # Merge the stimuli of all the datastores if this flag is set to true
        if merge_stimuli:
            for key, value in datastore.sensory_stimulus.items():
                merged_datastore.sensory_stimulus[key] = value

    return merged_datastore
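
# A minimal usage sketch for merge_datastores. This mirrors the PickledDataStore
# construction used inside the function itself; the run directories "run_A" and
# "run_B" and the output directory "merged_run" are hypothetical and stand in
# for existing mozaik result directories.
if __name__ == "__main__":
    ds_a = PickledDataStore(
        load=True,
        parameters=ParameterSet({"root_directory": "run_A",
                                 "store_stimuli": True}),
        replace=False)
    ds_b = PickledDataStore(
        load=True,
        parameters=ParameterSet({"root_directory": "run_B",
                                 "store_stimuli": True}),
        replace=False)

    # Merge everything (recordings, analysis results and stimuli) into a new
    # datastore saved under "merged_run".
    merged = merge_datastores((ds_a, ds_b), "merged_run",
                              merge_recordings=True,
                              merge_analysis=True,
                              merge_stimuli=True)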