def test_multiple_files(self):
    """Finding events in two input logs yields one event database per file."""
    base_dir = os.path.dirname(os.path.realpath(__file__))
    log_names = ('chimera_nonoise_2events_1levels.log',
                 'chimera_nonoise_1event_2levels.log')
    file_names = [os.path.join(base_dir, 'testDataFiles', name)
                  for name in log_names]
    event_databases = find_events(
        file_names,
        save_file_names=['_testMultipleFiles_1_9238.h5',
                         '_testMultipleFiles_2_9238.h5'])
    self.assertEqual(len(event_databases), 2)
    # Validate each output database with the checker matching its input file,
    # then remove it.
    checkers = (self._test_chimera_no_noise_2events_1levels_wrapper,
                self._test_chimera_no_noise_1event_2levels_helper)
    for database_name, check in zip(event_databases, checkers):
        h5file = ed.open_file(database_name, mode='r')
        check(h5file)
        h5file.close()
        os.remove(database_name)
def test_debug_option(self): filename = os.path.dirname(os.path.realpath(__file__)) filename = tf.get_abs_path('chimera_nonoise_2events_1levels.log') output_filename = '_test_debug_option.h5' reader = get_reader_from_filename(filename) data = [reader] event_databases = find_events(data, save_file_names=[output_filename], debug=True) data = reader.get_all_data()[0] reader.close() self.assertEqual(len(event_databases), 1) h5file = ed.open_file(event_databases[0], mode='r') self._test_chimera_no_noise_2events_1levels_wrapper(h5file) # check that the file has the correct groups groups = [x._v_name for x in h5file.walk_groups()] self.assertIn('debug', groups, "No debug group in debug file.") print "data:", h5file.root.debug.data[:] np.testing.assert_array_equal(data, h5file.root.debug.data[0][:]) h5file.close() os.remove(event_databases[0])
def test_good_thresholds(self, filename):
    """
    Tests that we find the correct number of events when the starting and
    ending thresholds are appropriate.

    :param filename: name of the event database file to create.
    """
    data_file = tf.get_abs_path('chimera_1event.log')
    parameters = Parameters(
        threshold_strategy=AbsoluteChangeThresholdStrategy(2., 1.))
    event_databases = find_events([data_file], parameters=parameters,
                                  save_file_names=[filename], debug=True)
    h5file = ed.open_file(filename, mode='r')
    # Check the number of events
    event_count = h5file.get_event_count()
    # BUG FIX: the failure message previously interpolated
    # (event_count, 0), so it always reported the actual count as the
    # expectation and "was 0". Expected is 1, actual is event_count.
    self.assertEqual(event_count, 1,
                     "Unexpected event count. Should be {0}, was {1}.".format(
                         1, event_count))
    # Check the event length (in seconds) is within 5% of the known value.
    sample_rate = h5file.get_sample_rate()
    event_length = h5file.get_event_row(0)['event_length'] / sample_rate
    event_length_should_be = 0.00024
    percent_diff = abs(event_length - event_length_should_be) / event_length_should_be
    self.assertLessEqual(
        percent_diff, 0.05,
        "Unexpected event length. Should be {0}, was {1}.".format(
            event_length_should_be, event_length))
    h5file.close()
def test_good_thresholds(self, filename):
    """
    Tests that we find the correct number of events when the starting and
    ending thresholds are appropriate.

    :param filename: name of the event database file to create.
    """
    data_file = tf.get_abs_path('chimera_1event.log')
    parameters = Parameters(
        threshold_strategy=AbsoluteChangeThresholdStrategy(2., 1.))
    event_databases = find_events([data_file], parameters=parameters,
                                  save_file_names=[filename], debug=True)
    h5file = ed.open_file(filename, mode='r')
    # Check the number of events
    event_count = h5file.get_event_count()
    # BUG FIX: the format arguments were (event_count, 0), which printed
    # the actual count as the expectation. Expected is 1, actual is
    # event_count.
    self.assertEqual(
        event_count, 1,
        "Unexpected event count. Should be {0}, was {1}.".format(
            1, event_count))
    # Check the event length (in seconds) is within 5% of the known value.
    sample_rate = h5file.get_sample_rate()
    event_length = h5file.get_event_row(0)['event_length'] / sample_rate
    event_length_should_be = 0.00024
    percent_diff = abs(event_length - event_length_should_be) / event_length_should_be
    self.assertLessEqual(
        percent_diff, 0.05,
        "Unexpected event length. Should be {0}, was {1}.".format(
            event_length_should_be, event_length))
    h5file.close()
def test_multiple_files(self):
    """Two input logs produce two databases, each passing its own checks."""
    file_names = [tf.get_abs_path('chimera_nonoise_2events_1levels.log'),
                  tf.get_abs_path('chimera_nonoise_1event_2levels.log')]
    event_databases = find_events(
        file_names,
        save_file_names=['_testMultipleFiles_1_9238.h5',
                         '_testMultipleFiles_2_9238.h5'])
    self.assertEqual(len(event_databases), 2)
    # Each database is checked by the validator matching its source file,
    # then deleted.
    validators = (self._test_chimera_no_noise_2events_1levels_wrapper,
                  self._test_chimera_no_noise_1event_2levels_helper)
    for db_name, validate in zip(event_databases, validators):
        h5file = ed.open_file(db_name, mode='r')
        validate(h5file)
        h5file.close()
        os.remove(db_name)
def _filter_wrap(tup): # returns the output filename filename = tup[0] print "Starting file:", filename ed = open_file(filename, mode='r') output_filename = filename[:-3] + '.csv' ed.to_csv(output_filename) print "Done converting input {0} into output {1}.".format( tup[0], output_filename) return output_filename
def test_chimera_no_noise_1event_2levels(self):
    """Single two-level event in the no-noise log passes the shared checks."""
    log_path = tf.get_abs_path('chimera_nonoise_1event_2levels.log')
    save_name = '_testChimera_nonoise_1Event_2Levels_9238.h5'
    event_database = find_events([log_path], save_file_names=[save_name])[0]
    h5file = ed.open_file(event_database, mode='r')
    self._test_chimera_no_noise_1event_2levels_helper(h5file)
    h5file.close()
    # clean up the database created by find_events
    os.remove(event_database)
def test_chimera_no_noise_1event_2levels(self):
    """A single event with two levels is detected and verified by the helper."""
    test_dir = os.path.dirname(os.path.realpath(__file__))
    data_path = os.path.join(test_dir, "testDataFiles",
                             "chimera_nonoise_1event_2levels.log")
    event_database = find_events(
        [data_path],
        save_file_names=["_testChimera_nonoise_1Event_2Levels_9238.h5"])[0]
    h5file = ed.open_file(event_database, mode="r")
    self._test_chimera_no_noise_1event_2levels_helper(h5file)
    h5file.close()
    # remove the database generated by this test
    os.remove(event_database)
def test_open_existing_empty_database(self):
    """
    Tests opening an existing h5 file with an empty EventsDatabase structure.
    """
    # Close the database created in setUp, then reopen the same file from
    # disk so we exercise the open-existing-file code path.
    self.database.close()
    self.database = eD.open_file(self.filename)
    # The reopened file should still have the standard root layout and an
    # events group with no events in it.
    self._test_initial_root(self.database)
    self._test_empty_events_group()
def test_chimera_no_noise_1event_2levels(self):
    """Checks detection of one two-level event in the no-noise chimera log."""
    log_file = tf.get_abs_path('chimera_nonoise_1event_2levels.log')
    databases = find_events(
        [log_file],
        save_file_names=['_testChimera_nonoise_1Event_2Levels_9238.h5'])
    event_database = databases[0]
    h5file = ed.open_file(event_database, mode='r')
    self._test_chimera_no_noise_1event_2levels_helper(h5file)
    h5file.close()
    # delete the newly created event file
    os.remove(event_database)
def test_multiple_files(self):
    """Event finding over two files returns two databases, both valid."""
    test_dir = os.path.dirname(os.path.realpath(__file__))
    file_names = [
        os.path.join(test_dir, "testDataFiles", name)
        for name in ("chimera_nonoise_2events_1levels.log",
                     "chimera_nonoise_1event_2levels.log")
    ]
    event_databases = find_events(
        file_names,
        save_file_names=["_testMultipleFiles_1_9238.h5",
                         "_testMultipleFiles_2_9238.h5"])
    self.assertEqual(len(event_databases), 2)
    # Check each output against the validator for its input, then delete it.
    checks = (self._test_chimera_no_noise_2events_1levels_wrapper,
              self._test_chimera_no_noise_1event_2levels_helper)
    for db_name, check in zip(event_databases, checks):
        h5file = ed.open_file(db_name, mode="r")
        check(h5file)
        h5file.close()
        os.remove(db_name)
def test_open_existing_full_database(self): """ Tests opening an existing h5 file with events in the EventDatabase structure. """ # Add to the raw_data matrix raw_data = np.zeros((2, self.max_event_length)) raw_data[1][:] += 1 self.database.append_raw_data(raw_data) # Add to the levels matrix levels = raw_data + 1 self.database.append_levels(levels) # Add to the level_lengths matrix level_lengths = raw_data + 2 self.database.append_level_lengths(level_lengths) # Add to the event table event_row = self.database.get_event_table_row() event_row['array_row'] = 1 event_row['event_start'] = 2 event_row['event_length'] = 3 event_row['n_levels'] = 4 event_row['raw_points_per_side'] = 5 event_row['baseline'] = 6 event_row['current_blockage'] = 7 event_row['area'] = 8 event_row.append() # Close the file self.database.flush() self.database.close() # Open the existing file self.database = eD.open_file(self.filename, mode='r') # Check the raw_data matrix npt.assert_array_equal(raw_data, self.database.root.events.raw_data[:]) # Check the levels matrix npt.assert_array_equal(levels, self.database.root.events.levels[:]) # Check the level_lengths matrix npt.assert_array_equal(level_lengths, self.database.root.events.level_lengths[:]) # Check the eventTable row = self.database.root.events.eventTable[0] self.assertEqual(row['array_row'], 1) self.assertEqual(row['area'], 8)
def test_initialize_events_database_with_debug(self):
    """
    Tests that opening a new database with debug=True creates the debug
    group with correctly shaped data/baseline/threshold matrices, and that
    the file is writable.
    """
    filename = 'test_initialize_events_database_with_debug.h5'
    # Remove any leftover file from a previous (failed) run.
    if os.path.exists(filename):
        os.remove(filename)
    n_points = 100
    n_channels = 2
    database = eD.open_file(filename, mode='w', debug=True,
                            n_points=n_points, n_channels=n_channels)
    self._test_initial_root(database)
    self._test_empty_events_group(events_group=database.root.events)
    # Make sure is debug
    self.assertTrue(database.is_debug())
    # Make sure the debug group is there.
    names = [x._v_name for x in database.walk_groups()]
    self.assertIn('debug', names, 'No debug group.')
    debug_group = database.root.debug
    # Make sure the debug group contains the four expected matrices.
    debug_group_names = [
        x._v_name for x in database.walk_nodes(debug_group)
    ]
    print debug_group_names
    self.assertIn('data', debug_group_names, 'No data matrix.')
    self.assertIn('baseline', debug_group_names, 'No baseline matrix.')
    self.assertIn('threshold_positive', debug_group_names,
                  'No positive threshold matrix.')
    self.assertIn('threshold_negative', debug_group_names,
                  'No negative threshold matrix.')
    # Make sure arrays are correct dimensions
    self.assertEqual(debug_group.data.shape, (n_channels, n_points))
    self.assertEqual(debug_group.baseline.shape, (n_channels, n_points))
    self.assertEqual(debug_group.threshold_positive.shape,
                     (n_channels, n_points))
    self.assertEqual(debug_group.threshold_negative.shape,
                     (n_channels, n_points))
    # Check is in write mode: appending must not raise.
    database.root.events.raw_data.append(np.zeros((1, 100)))
    database.flush()
    database.close()
    os.remove(filename)
def test_chimera_no_noise_1event_2levels(self):
    """The no-noise 1-event/2-level log passes the shared helper assertions."""
    data_dir = os.path.dirname(os.path.realpath(__file__))
    data_file = os.path.join(data_dir, 'testDataFiles',
                             'chimera_nonoise_1event_2levels.log')
    event_database = find_events(
        [data_file],
        save_file_names=['_testChimera_nonoise_1Event_2Levels_9238.h5'])[0]
    h5file = ed.open_file(event_database, mode='r')
    self._test_chimera_no_noise_1event_2levels_helper(h5file)
    h5file.close()
    # remove the database this test just created
    os.remove(event_database)
def test_chimera_no_noise_2events_1levels(self):
    """Exactly one database is produced; its two 1-level events check out."""
    log_path = tf.get_abs_path('chimera_nonoise_2events_1levels.log')
    databases = find_events(
        [log_path],
        save_file_names=['_testChimera_nonoise_2events_1levels_9238.h5'])
    self.assertEqual(len(databases), 1)
    db_name = databases[0]
    h5file = ed.open_file(db_name, mode='r')
    self._test_chimera_no_noise_2events_1levels_wrapper(h5file)
    h5file.close()
    # clean up the generated database
    os.remove(db_name)
def test_saving_files(self):
    """find_events writes its database to disk and the file can be reopened."""
    input_path = tf.get_abs_path('chimera_1event.log')
    event_database = find_events(
        [input_path], save_file_names=['_testSavingFiles_9238.h5'])[0]
    self.assertTrue(os.path.isfile(event_database))
    database = ed.open_file(event_database, mode='r')
    self.assertTrue(database.isopen)
    database.close()
    # delete the newly created event file
    os.remove(event_database)
def test_saving_files(self):
    """The event database is saved to disk and reopens cleanly."""
    test_dir = os.path.dirname(os.path.realpath(__file__))
    data_path = os.path.join(test_dir, "testDataFiles", "chimera_1event.log")
    event_database = find_events(
        [data_path], save_file_names=["_testSavingFiles_9238.h5"])[0]
    self.assertTrue(os.path.isfile(event_database))
    database = ed.open_file(event_database, mode="r")
    self.assertTrue(database.isopen)
    database.close()
    # remove the database generated by this test
    os.remove(event_database)
def test_chimera_no_noise_2events_1levels(self):
    """Two single-level events in the no-noise log yield one valid database."""
    log_file = tf.get_abs_path('chimera_nonoise_2events_1levels.log')
    found = find_events(
        [log_file],
        save_file_names=['_testChimera_nonoise_2events_1levels_9238.h5'])
    self.assertEqual(len(found), 1)
    event_database = found[0]
    h5file = ed.open_file(event_database, mode='r')
    self._test_chimera_no_noise_2events_1levels_wrapper(h5file)
    h5file.close()
    # delete the newly created event file
    os.remove(event_database)
def test_saving_files(self):
    """The saved database exists on disk and opens in read mode."""
    source_log = tf.get_abs_path('chimera_1event.log')
    event_database = find_events(
        [source_log], save_file_names=['_testSavingFiles_9238.h5'])[0]
    self.assertTrue(os.path.isfile(event_database))
    h5 = ed.open_file(event_database, mode='r')
    self.assertTrue(h5.isopen)
    h5.close()
    # clean up the generated database
    os.remove(event_database)
def test_too_large_end_threshold(self, filename):
    """
    Tests that we don't find events when the ending threshold is too large.

    :param filename: name of the event database file to create.
    """
    data_file = tf.get_abs_path('chimera_1event.log')
    parameters = Parameters(
        threshold_strategy=AbsoluteChangeThresholdStrategy(2., 1000.))
    event_databases = find_events([data_file], parameters=parameters,
                                  save_file_names=[filename], debug=True)
    h5file = ed.open_file(filename, mode='r')
    event_count = h5file.get_event_count()
    # BUG FIX: the failure message previously formatted (event_count, 0),
    # printing the actual count as the expectation. Expected is 0, actual
    # is event_count.
    self.assertEqual(event_count, 0,
                     "Unexpected event count. Should be {0}, was {1}.".format(
                         0, event_count))
    h5file.close()
def test_passing_reader(self):
    """
    Tests that passing an open subtype of
    :py:class:`pypore.i_o.abstract_reader.AbstractReader` works.
    """
    test_dir = os.path.dirname(os.path.realpath(__file__))
    data_path = os.path.join(test_dir, "testDataFiles",
                             "chimera_nonoise_2events_1levels.log")
    reader = get_reader_from_filename(data_path)
    event_databases = find_events([reader],
                                  save_file_names=["_test_passing_reader.h5"])
    self.assertEqual(len(event_databases), 1)
    h5file = ed.open_file(event_databases[0], mode="r")
    self._test_chimera_no_noise_2events_1levels_wrapper(h5file)
    h5file.close()
    os.remove(event_databases[0])
def add_selections(self, file_names, params):
    """
    Plots event statistics.

    Opens each event database in file_names, gathers every event's current
    blockage and dwell time, and adds histograms plus a scatter plot of the
    combined data, colored per params['color'] at half opacity.
    """
    files = []
    counts = []
    event_count = 0
    # First pass: open all databases and total the event counts so the
    # aggregate arrays can be allocated once.
    for filename in file_names:
        h5file = ed.open_file(filename, mode='r')
        files.append(h5file)
        count = h5file.get_event_count()
        event_count += count
        counts.append(count)
    current_blockade = np.empty(event_count)
    dwell_times = np.empty(event_count)
    # Second pass: fill the aggregate arrays; `count` is the running offset
    # of the current file's events within the combined arrays.
    count = 0
    for j, filex in enumerate(files):
        event_table = filex.get_event_table()
        sample_rate = filex.get_sample_rate()
        for i, row in enumerate(event_table):
            current_blockade[count + i] = row['current_blockage']
            # event_length is in samples; divide by sample rate for seconds.
            dwell_times[count + i] = row['event_length'] / sample_rate
        count += counts[j]
    color = params['color']
    # Same color at 50% alpha so overlapping selections stay visible.
    new_color = QtGui.QColor(color.red(), color.green(), color.blue(), 128)
    self.plot_event_dur.add_histogram(dwell_times, color=new_color)
    self.plot_event_depth.add_histogram(current_blockade, color=new_color)
    # The scatter item keeps the file names/counts so a clicked point can be
    # traced back to its source database.
    scatter_item = ScatterPlotItem(size=10, pen=mkPen(None), brush=new_color,
                                   files=file_names, counts=counts)
    scatter_item.setData(dwell_times, current_blockade)
    self.plot_event_dur_event_depth.addItem(scatter_item)
    scatter_item.sigClicked.connect(self.on_scatter_points_clicked)
    for filex in files:
        filex.close()
    return
def test_passing_reader(self):
    """
    Tests that passing an open subtype of
    :py:class:`pypore.i_o.abstract_reader.AbstractReader` works.
    """
    # BUG FIX: the original assigned
    # os.path.dirname(os.path.realpath(__file__)) to `filename` and then
    # immediately overwrote it with tf.get_abs_path(...); the dead
    # assignment has been removed.
    filename = tf.get_abs_path('chimera_nonoise_2events_1levels.log')
    reader = get_reader_from_filename(filename)
    data = [reader]
    event_databases = find_events(
        data, save_file_names=['_test_passing_reader.h5'])
    self.assertEqual(len(event_databases), 1)
    h5file = ed.open_file(event_databases[0], mode='r')
    self._test_chimera_no_noise_2events_1levels_wrapper(h5file)
    h5file.close()
    os.remove(event_databases[0])
def _on_eventview_file_item_doubleclick(self, item):
    """
    Handles a double-click on a file item in the event view list: opens the
    item's event database, plots debug data if present, and resets the
    event-navigation widgets to show event 1.
    """
    self.event_view_item = item
    h5file = eD.open_file(item.get_file_name())
    event_count = h5file.get_event_count()
    # Debug databases carry the raw data used during detection; plot it.
    if h5file.is_debug():
        self.plot_debug(h5file)
    h5file.close()
    # NOTE(review): this max length grows with event_count/10 rather than
    # with the digit count; len(str(event_count)) may have been intended —
    # confirm before relying on it.
    self.event_display_edit.setMaxLength(int(event_count / 10) + 1)
    self.event_display_edit.setValidator(
        QtGui.QIntValidator(1, event_count, self.event_display_edit))
    self.event_count_text.setText('/' + str(event_count))
    # Clear before setting so a change signal fires even if the text was
    # already '1'.
    self.event_display_edit.setText('')
    self.event_display_edit.setText('1')
def plot_single_events(self, event):
    """
    Plots up to nine events, starting at index `event`, in the 3x3 grid of
    event-viewer plots.

    :param event: index of the event shown in the top-left grid cell.
    """
    h5file = eD.open_file(self.event_view_item.get_file_name(), mode='r')
    event_count = h5file.get_event_count()
    for i in xrange(3):
        for j in xrange(3):
            # pos is the flat index of the grid cell (row-major).
            pos = 3 * i + j
            # Clear cells whose event index falls outside the valid range.
            if pos + event >= event_count or pos + event < 0:
                self.eventviewer_plots[pos].clear()
                self.eventviewer_plots[pos].setTitle('')
            else:
                self.plot_single_event(h5file, event + pos,
                                       self.eventviewer_plots[pos])
                # Displayed event numbers are 1-based.
                self.eventviewer_plots[pos].setTitle('Event ' +
                                                     str(event + pos + 1))
    h5file.close()
def _on_eventview_file_item_doubleclick(self, item):
    """
    Handles a double-click on a file item in the event view list: opens
    the item's database, plots its debug data when available, and resets
    the navigation widgets to event 1.
    """
    self.event_view_item = item
    h5file = eD.open_file(item.get_file_name())
    event_count = h5file.get_event_count()
    # If the database was written with debug=True, show its raw data.
    if h5file.is_debug():
        self.plot_debug(h5file)
    h5file.close()
    # NOTE(review): int(event_count / 10) + 1 scales with the count, not
    # its digit count; likely intended len(str(event_count)) — confirm.
    self.event_display_edit.setMaxLength(int(event_count / 10) + 1)
    self.event_display_edit.setValidator(
        QtGui.QIntValidator(1, event_count, self.event_display_edit))
    self.event_count_text.setText('/' + str(event_count))
    # Clearing first forces a text-changed signal even if already '1'.
    self.event_display_edit.setText('')
    self.event_display_edit.setText('1')
def test_initialize_events_database_with_debug(self):
    """
    Tests that a new database opened with debug=True contains the debug
    group with data/baseline/threshold matrices of the right shape, and
    that the file is writable.
    """
    filename = 'test_initialize_events_database_with_debug.h5'
    # Remove any stale file left over from a previous run.
    if os.path.exists(filename):
        os.remove(filename)
    n_points = 100
    n_channels = 2
    database = eD.open_file(filename, mode='w', debug=True,
                            n_points=n_points, n_channels=n_channels)
    self._test_initial_root(database)
    self._test_empty_events_group(events_group=database.root.events)
    # Make sure is debug
    self.assertTrue(database.is_debug())
    # Make sure the debug group is there.
    names = [x._v_name for x in database.walk_groups()]
    self.assertIn('debug', names, 'No debug group.')
    debug_group = database.root.debug
    # Make sure the debug group holds all four expected matrices.
    debug_group_names = [x._v_name
                         for x in database.walk_nodes(debug_group)]
    print debug_group_names
    self.assertIn('data', debug_group_names, 'No data matrix.')
    self.assertIn('baseline', debug_group_names, 'No baseline matrix.')
    self.assertIn('threshold_positive', debug_group_names,
                  'No positive threshold matrix.')
    self.assertIn('threshold_negative', debug_group_names,
                  'No negative threshold matrix.')
    # Make sure arrays are correct dimensions
    self.assertEqual(debug_group.data.shape, (n_channels, n_points))
    self.assertEqual(debug_group.baseline.shape, (n_channels, n_points))
    self.assertEqual(debug_group.threshold_positive.shape,
                     (n_channels, n_points))
    self.assertEqual(debug_group.threshold_negative.shape,
                     (n_channels, n_points))
    # Check is in write mode: an append must succeed.
    database.root.events.raw_data.append(np.zeros((1, 100)))
    database.flush()
    database.close()
    os.remove(filename)
def move_event_display_by(self, count):
    """
    Changes the event displayed on the event display plot to current
    value + count.

    :param count: signed number of events to move by.
    """
    h5_event_count = 0
    try:
        h5file = eD.open_file(self.event_view_item.get_file_name())
        h5_event_count = h5file.get_event_count()
        h5file.close()
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallows SystemExit
        # and KeyboardInterrupt. No file selected or an unreadable file
        # means there is nothing to navigate, so just return.
        return
    try:
        event_count = int(self.event_display_edit.text())
        # Only move if the target stays within [1, h5_event_count].
        if 0 < event_count + count <= h5_event_count:
            self.event_display_edit.setText(str(event_count + count))
    except ValueError:
        # if we can't parse the event display text but there are events,
        # just set to zero
        if h5_event_count > 0:
            self.event_display_edit.setText('1')
def test_chimera_no_noise_1event(self):
    """
    Tests detection of the single 1000-point, one-level event in the
    noiseless chimera data set.
    """
    filename = os.path.dirname(os.path.realpath(__file__))
    filename = os.path.join(filename, 'testDataFiles',
                            'chimera_nonoise_1event.log')
    event_database = find_events(
        [filename],
        save_file_names=['_testChimera_nonoise_1Event_9238.h5'])[0]
    h5file = ed.open_file(event_database, mode='r')
    events = h5file.root.events
    # check event table correct length
    event_table = events.eventTable
    # BUG FIX: was assertTrue(event_table.nrows, 1), where 1 is just the
    # failure message, so it passed for any truthy nrows. Use assertEqual.
    self.assertEqual(event_table.nrows, 1)
    # check raw data array correct length
    raw_data_matrix = events.raw_data
    self.assertEqual(raw_data_matrix.nrows, 1)
    event_length = event_table[0]['event_length']
    self.assertEqual(event_length, 1000)
    # Make sure only 1 event with 1 level
    levels_matrix = events.levels
    self.assertEqual(levels_matrix.nrows, 1)
    n_levels = event_table[0]['n_levels']
    self.assertEqual(n_levels, 1)
    levels = levels_matrix[0]
    self.assertAlmostEqual(levels[0], 15.13955, 3)
    # Check 1 event with 1 levelLength
    lengths_matrix = events.level_lengths
    self.assertEqual(lengths_matrix.nrows, 1)
    lengths = lengths_matrix[0]
    self.assertEqual(lengths[0], 1000)
    h5file.close()
    # delete the newly created event file
    os.remove(event_database)
def test_too_large_end_threshold(self, filename):
    """
    Tests that we don't find events when the ending threshold is too large.

    :param filename: name of the event database file to create.
    """
    data_file = tf.get_abs_path('chimera_1event.log')
    parameters = Parameters(
        threshold_strategy=AbsoluteChangeThresholdStrategy(2., 1000.))
    event_databases = find_events([data_file], parameters=parameters,
                                  save_file_names=[filename], debug=True)
    h5file = ed.open_file(filename, mode='r')
    event_count = h5file.get_event_count()
    # BUG FIX: the format arguments were (event_count, 0), printing the
    # actual count as the expectation. Expected is 0, actual is
    # event_count.
    self.assertEqual(
        event_count, 0,
        "Unexpected event count. Should be {0}, was {1}.".format(
            0, event_count))
    h5file.close()
def test_chimera_no_noise_1event(self):
    """
    Tests detection of the single 1000-point, one-level event in the
    noiseless chimera data set.
    """
    filename = os.path.dirname(os.path.realpath(__file__))
    filename = os.path.join(filename, "testDataFiles",
                            "chimera_nonoise_1event.log")
    event_database = find_events(
        [filename],
        save_file_names=["_testChimera_nonoise_1Event_9238.h5"])[0]
    h5file = ed.open_file(event_database, mode="r")
    events = h5file.root.events
    # check event table correct length
    event_table = events.eventTable
    # BUG FIX: was assertTrue(event_table.nrows, 1); the 1 is the failure
    # message argument, so the check passed for any truthy nrows.
    self.assertEqual(event_table.nrows, 1)
    # check raw data array correct length
    raw_data_matrix = events.raw_data
    self.assertEqual(raw_data_matrix.nrows, 1)
    event_length = event_table[0]["event_length"]
    self.assertEqual(event_length, 1000)
    # Make sure only 1 event with 1 level
    levels_matrix = events.levels
    self.assertEqual(levels_matrix.nrows, 1)
    n_levels = event_table[0]["n_levels"]
    self.assertEqual(n_levels, 1)
    levels = levels_matrix[0]
    self.assertAlmostEqual(levels[0], 15.13955, 3)
    # Check 1 event with 1 levelLength
    lengths_matrix = events.level_lengths
    self.assertEqual(lengths_matrix.nrows, 1)
    lengths = lengths_matrix[0]
    self.assertEqual(lengths[0], 1000)
    h5file.close()
    # delete the newly created event file
    os.remove(event_database)
def test_chimera_no_noise_1event(self):
    """
    Tests detection of the single 1000-point, one-level event in the
    noiseless chimera data set.
    """
    filename = tf.get_abs_path('chimera_nonoise_1event.log')
    event_database = find_events(
        [filename],
        save_file_names=['_testChimera_nonoise_1Event_9238.h5'])[0]
    h5file = ed.open_file(event_database, mode='r')
    events = h5file.root.events
    # check event table correct length
    event_table = events.eventTable
    # BUG FIX: was assertTrue(event_table.nrows, 1), which passes for any
    # truthy nrows because 1 is just the failure message. Use assertEqual.
    self.assertEqual(event_table.nrows, 1)
    # check raw data array correct length
    raw_data_matrix = events.raw_data
    self.assertEqual(raw_data_matrix.nrows, 1)
    event_length = event_table[0]['event_length']
    self.assertEqual(event_length, 1000)
    # Make sure only 1 event with 1 level
    levels_matrix = events.levels
    self.assertEqual(levels_matrix.nrows, 1)
    n_levels = event_table[0]['n_levels']
    self.assertEqual(n_levels, 1)
    levels = levels_matrix[0]
    self.assertAlmostEqual(levels[0], 15.13955, 3)
    # Check 1 event with 1 levelLength
    lengths_matrix = events.level_lengths
    self.assertEqual(lengths_matrix.nrows, 1)
    lengths = lengths_matrix[0]
    self.assertEqual(lengths[0], 1000)
    h5file.close()
    # delete the newly created event file
    os.remove(event_database)
def on_scatter_points_clicked(self, plot, points):
    """
    Callback for when a scatter plot points are clicked.
    Highlights the points and un-highlights previously selected points.

    plot should be a MyScatterPlotItem
    points should be a MySpotItem
    """
    # Un-highlight whatever was selected on the previous click.
    for p in self.last_scatter_clicked:
        p.resetPen()
        # remove point we've already selected so we
        # can select points behind it.
        if p in points and len(points) > 1:
            points.remove(p)
    # print 'Points clicked:', points, plot
    for point in points:
        point.setPen('w', width=2)
        self.last_scatter_clicked = [point]
        break  # only take first point
    # Plot the new point clicked on the single event display
    filename, position = plot.get_file_name_from_position(
        self.last_scatter_clicked[0].event_position)
    h5file = ed.open_file(filename, mode='r')
    row = h5file.get_event_row(position)
    array_row = row['array_row']
    sample_rate = h5file.get_sample_rate()
    event_length = row['event_length']
    raw_points_per_side = row['raw_points_per_side']
    raw_data = h5file.get_raw_data_at(array_row)
    n = len(raw_data)
    # Time axis in seconds for the full raw trace.
    times = np.linspace(0.0, 1.0 * n / sample_rate, n)
    self.plot_scatter_select.clear()
    self.plot_scatter_select.plot(times, raw_data)
    # plot the event points in yellow
    self.plot_scatter_select.plot(
        times[raw_points_per_side:raw_points_per_side + event_length],
        raw_data[raw_points_per_side:raw_points_per_side + event_length],
        pen='y')
    # Plot the cusum levels
    n_levels = row['n_levels']
    baseline = row['baseline']
    # left, start-1, start,
    levels = h5file.get_levels_at(array_row)
    indices = h5file.get_level_lengths_at(array_row)
    # Two points per level plus two baseline points on each side.
    level_times = np.zeros(2 * n_levels + 4)
    level_values = np.zeros(2 * n_levels + 4)
    level_times[1] = 1.0 * (raw_points_per_side - 1) / sample_rate
    level_values[0] = level_values[1] = baseline
    i = 0
    # Running sample offset of the current level within the event.
    length = 0
    for i in xrange(n_levels):
        level_times[2 * i + 2] = times[raw_points_per_side] + 1.0 * length / sample_rate
        level_values[2 * i + 2] = levels[i]
        level_times[2 * i + 3] = times[raw_points_per_side] + 1.0 * (
            length + indices[i]) / sample_rate
        level_values[2 * i + 3] = levels[i]
        length += indices[i]
        i += 1
    # Trailing baseline segment after the event; after the loop i == n_levels.
    level_times[2 * i + 2] = times[raw_points_per_side + event_length]
    level_times[2 * i + 3] = times[n - 1]
    level_values[2 * i + 2] = level_values[2 * i + 3] = baseline
    self.plot_scatter_select.plot(level_times, level_values, pen='g')
    h5file.close()
def on_scatter_points_clicked(self, plot, points):
    """
    Callback for when a scatter plot points are clicked.
    Highlights the points and un-highlights previously selected points.

    plot should be a MyScatterPlotItem
    points should be a MySpotItem
    """
    # Clear the highlight from the previous selection.
    for p in self.last_scatter_clicked:
        p.resetPen()
        # remove point we've already selected so we
        # can select points behind it.
        if p in points and len(points) > 1:
            points.remove(p)
    # print 'Points clicked:', points, plot
    for point in points:
        point.setPen('w', width=2)
        self.last_scatter_clicked = [point]
        break  # only take first point
    # Plot the new point clicked on the single event display
    filename, position = plot.get_file_name_from_position(
        self.last_scatter_clicked[0].event_position)
    h5file = ed.open_file(filename, mode='r')
    row = h5file.get_event_row(position)
    array_row = row['array_row']
    sample_rate = h5file.get_sample_rate()
    event_length = row['event_length']
    raw_points_per_side = row['raw_points_per_side']
    raw_data = h5file.get_raw_data_at(array_row)
    n = len(raw_data)
    # Seconds axis covering the whole raw trace.
    times = np.linspace(0.0, 1.0 * n / sample_rate, n)
    self.plot_scatter_select.clear()
    self.plot_scatter_select.plot(times, raw_data)
    # plot the event points in yellow
    self.plot_scatter_select.plot(
        times[raw_points_per_side:raw_points_per_side + event_length],
        raw_data[raw_points_per_side:raw_points_per_side + event_length],
        pen='y')
    # Plot the cusum levels
    n_levels = row['n_levels']
    baseline = row['baseline']
    # left, start-1, start,
    levels = h5file.get_levels_at(array_row)
    indices = h5file.get_level_lengths_at(array_row)
    # Two plot points per level, plus two baseline points on each side.
    level_times = np.zeros(2 * n_levels + 4)
    level_values = np.zeros(2 * n_levels + 4)
    level_times[1] = 1.0 * (raw_points_per_side - 1) / sample_rate
    level_values[0] = level_values[1] = baseline
    i = 0
    # Running sample offset of the current level within the event.
    length = 0
    for i in xrange(n_levels):
        level_times[
            2 * i + 2] = times[raw_points_per_side] + 1.0 * length / sample_rate
        level_values[2 * i + 2] = levels[i]
        level_times[2 * i + 3] = times[raw_points_per_side] + 1.0 * (
            length + indices[i]) / sample_rate
        level_values[2 * i + 3] = levels[i]
        length += indices[i]
        i += 1
    # Trailing baseline after the event; i == n_levels once the loop ends.
    level_times[2 * i + 2] = times[raw_points_per_side + event_length]
    level_times[2 * i + 3] = times[n - 1]
    level_values[2 * i + 2] = level_values[2 * i + 3] = baseline
    self.plot_scatter_select.plot(level_times, level_values, pen='g')
    h5file.close()
def setUp(self):
    """Creates a fresh event database file before each test."""
    self.filename = 'testEventDatabase_938247283278128.h5'
    self.max_event_length = 100
    # NOTE(review): the keyword `maxEventLength` is camelCase while the
    # rest of this API is snake_case — confirm it matches eD.open_file's
    # actual signature.
    self.database = eD.open_file(self.filename, mode='w',
                                 maxEventLength=self.max_event_length)