def run(self):
    self.time1 = time.time()
    if os.name == 'posix':
        self._pipe, child_conn = Pipe()
        self.p = Process(target=find_events, args=(self.file_names,),
                         kwargs={'parameters': self.parameters, 'pipe': child_conn,
                                 'debug': self.debug, 'save_file_names': self.save_file_names})
        self.p.start()
        # child_conn needs to be closed in all processes before EOFError is thrown (on Linux),
        # so close it here immediately.
        child_conn.close()
        while True:
            time.sleep(0)
            try:
                data = self._pipe.recv()
                if 'status_text' in data:
                    self.status_text = data['status_text']
                if 'Events' in data:
                    self.events += data['Events']
            except:
                break
    else:
        # TODO add ability to listen for info from find_events on Windows.
        # On Windows we can only fork a process if __name__ == '__main__', which is not true
        # here (because AnalyzeDataThread is imported).
        # So on Windows, just use this thread; don't use an additional separate process.
        find_events(self.file_names, parameters=self.parameters, debug=self.debug,
                    save_file_names=self.save_file_names)
    self.cancelled = True
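# A minimal sketch (not the actual find_events implementation) of the child side of the pipe
# protocol that run() above consumes. It only illustrates the message shapes the parent loop
# expects: dicts carrying 'status_text' and/or 'Events', followed by closing the connection so
# the parent's recv() raises EOFError and its loop exits. The function name and the placeholder
# detection step are hypothetical.
def _find_events_pipe_sketch(file_names, pipe=None, **kwargs):
    all_events = []
    for i, name in enumerate(file_names):
        if pipe is not None:
            pipe.send({'status_text': 'Processing file {0} of {1}'.format(i + 1, len(file_names))})
        new_events = []  # ... event detection for `name` would populate this (omitted) ...
        all_events += new_events
        if pipe is not None:
            pipe.send({'Events': new_events})
    if pipe is not None:
        # Closing the connection makes the parent's recv() raise EOFError, ending its loop.
        pipe.close()
    return all_events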
def test_debug_option(self):
    filename = tf.get_abs_path('chimera_nonoise_2events_1levels.log')
    output_filename = '_test_debug_option.h5'

    reader = get_reader_from_filename(filename)
    data = [reader]

    event_databases = find_events(data, save_file_names=[output_filename], debug=True)

    data = reader.get_all_data()[0]
    reader.close()

    self.assertEqual(len(event_databases), 1)

    h5file = ed.open_file(event_databases[0], mode='r')

    self._test_chimera_no_noise_2events_1levels_wrapper(h5file)

    # check that the file has the correct groups
    groups = [x._v_name for x in h5file.walk_groups()]
    self.assertIn('debug', groups, "No debug group in debug file.")

    print "data:", h5file.root.debug.data[:]
    np.testing.assert_array_equal(data, h5file.root.debug.data[0][:])

    h5file.close()

    os.remove(event_databases[0])
def _test_number_of_events_help(name):
    # Need to seed, otherwise each process will generate the same random data!
    np.random.seed()

    # Check that setting the event rate gives correct number of events.
    seconds = 5.
    sample_rate = 1.e6
    baseline = 1.
    event_rate = 50.
    event_duration = stats.norm(loc=100.e-6, scale=5.e-6)
    event_depth = stats.norm(loc=-1., scale=.05)
    noise = stats.norm(scale=0.02)

    # create a list of file_names so we can average the number of events.
    if os.path.exists(name):
        os.remove(name)

    n_e_r = create_random_data(filename=name, seconds=seconds, sample_rate=sample_rate,
                               baseline=baseline, noise=noise, event_rate=event_rate,
                               event_durations=event_duration, event_depths=event_depth)

    save_file_name = name[:-len('.h5')] + '_Events.h5'
    if os.path.exists(save_file_name):
        os.remove(save_file_name)

    event_database = find_events([name], save_file_names=[save_file_name])[0]

    ed = EventDatabase(event_database)

    n_events_found = ed.get_event_count()

    os.remove(name)
    ed.close()
    os.remove(event_database)

    return n_e_r, n_events_found, name
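# A minimal sketch (not part of the original test suite) of how the helper above could be driven
# from a multiprocessing.Pool to average event counts over several random data files. The
# np.random.seed() call inside _test_number_of_events_help matters here: without it, each worker
# would generate identical random data. The function name and file names below are hypothetical.
def _average_event_counts_sketch(n_files=4):
    from multiprocessing import Pool

    names = ['_random_data_{0}.h5'.format(i) for i in range(n_files)]
    pool = Pool()
    try:
        # Each result is a (n_events_requested, n_events_found, name) tuple.
        results = pool.map(_test_number_of_events_help, names)
    finally:
        pool.close()
        pool.join()

    found = [r[1] for r in results]
    return sum(found) / float(len(found))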
def test_multiple_files(self):
    filename1 = os.path.dirname(os.path.realpath(__file__))
    filename1 = os.path.join(filename1, 'testDataFiles', 'chimera_nonoise_2events_1levels.log')
    filename2 = os.path.dirname(os.path.realpath(__file__))
    filename2 = os.path.join(filename2, 'testDataFiles', 'chimera_nonoise_1event_2levels.log')
    file_names = [filename1, filename2]

    event_databases = find_events(file_names,
                                  save_file_names=['_testMultipleFiles_1_9238.h5',
                                                   '_testMultipleFiles_2_9238.h5'])

    self.assertEqual(len(event_databases), 2)

    h5file = ed.open_file(event_databases[0], mode='r')
    self._test_chimera_no_noise_2events_1levels_wrapper(h5file)
    h5file.close()
    os.remove(event_databases[0])

    h5file = ed.open_file(event_databases[1], mode='r')
    self._test_chimera_no_noise_1event_2levels_helper(h5file)
    h5file.close()
    os.remove(event_databases[1])
def test_good_thresholds(self, filename):
    """
    Tests that we find the correct number of events when the starting and ending
    thresholds are appropriate.
    """
    data_file = tf.get_abs_path('chimera_1event.log')

    parameters = Parameters(threshold_strategy=AbsoluteChangeThresholdStrategy(2., 1.))

    event_databases = find_events([data_file], parameters=parameters,
                                  save_file_names=[filename], debug=True)

    h5file = ed.open_file(filename, mode='r')

    # Check the number of events
    event_count = h5file.get_event_count()
    self.assertEqual(event_count, 1,
                     "Unexpected event count. Should be {0}, was {1}.".format(1, event_count))

    # Check the event length
    sample_rate = h5file.get_sample_rate()
    event_length = h5file.get_event_row(0)['event_length'] / sample_rate
    event_length_should_be = 0.00024
    percent_diff = abs(event_length - event_length_should_be) / event_length_should_be
    self.assertLessEqual(percent_diff, 0.05,
                         "Unexpected event length. Should be {0}, was {1}.".format(
                             event_length_should_be, event_length))

    h5file.close()
def test_good_thresholds(self, filename):
    """
    Tests that we find the correct number of events when the starting and ending
    thresholds are appropriate.
    """
    data_file = tf.get_abs_path('chimera_1event.log')

    parameters = Parameters(threshold_strategy=AbsoluteChangeThresholdStrategy(2., 1.))

    event_databases = find_events([data_file], parameters=parameters,
                                  save_file_names=[filename], debug=True)

    h5file = ed.open_file(filename, mode='r')

    # Check the number of events
    event_count = h5file.get_event_count()
    self.assertEqual(event_count, 1,
                     "Unexpected event count. Should be {0}, was {1}.".format(1, event_count))

    # Check the event length
    sample_rate = h5file.get_sample_rate()
    event_length = h5file.get_event_row(0)['event_length'] / sample_rate
    event_length_should_be = 0.00024
    percent_diff = abs(event_length - event_length_should_be) / event_length_should_be
    self.assertLessEqual(percent_diff, 0.05,
                         "Unexpected event length. Should be {0}, was {1}.".format(
                             event_length_should_be, event_length))

    h5file.close()
def test_chimera_no_noise_1event_2levels(self):
    filename = tf.get_abs_path('chimera_nonoise_1event_2levels.log')

    event_database = find_events([filename],
                                 save_file_names=['_testChimera_nonoise_1Event_2Levels_9238.h5'])[0]

    h5file = ed.open_file(event_database, mode='r')
    self._test_chimera_no_noise_1event_2levels_helper(h5file)
    h5file.close()

    # delete the newly created event file
    os.remove(event_database)
def test_chimera_no_noise_1event_2levels(self):
    filename = os.path.dirname(os.path.realpath(__file__))
    filename = os.path.join(filename, "testDataFiles", "chimera_nonoise_1event_2levels.log")

    event_database = find_events([filename],
                                 save_file_names=["_testChimera_nonoise_1Event_2Levels_9238.h5"])[0]

    h5file = ed.open_file(event_database, mode="r")
    self._test_chimera_no_noise_1event_2levels_helper(h5file)
    h5file.close()

    # delete the newly created event file
    os.remove(event_database)
def test_chimera_no_noise_1event_2levels(self):
    filename = tf.get_abs_path('chimera_nonoise_1event_2levels.log')

    event_database = find_events([filename],
                                 save_file_names=['_testChimera_nonoise_1Event_2Levels_9238.h5'])[0]

    h5file = ed.open_file(event_database, mode='r')
    self._test_chimera_no_noise_1event_2levels_helper(h5file)
    h5file.close()

    # delete the newly created event file
    os.remove(event_database)
def test_event_params(self, filename):
    """
    Tests that setting the event depth and duration give correct values.
    """
    seconds = 5.
    sample_rate = 1.e6
    baseline = 10.
    event_rate = 100.
    event_duration_loc = 1.e-4
    event_duration_scale = 5.e-6
    event_duration = stats.norm(loc=event_duration_loc, scale=event_duration_scale)
    event_depth_loc = -1.
    event_depth_scale = .05
    event_depth = stats.norm(loc=event_depth_loc, scale=event_depth_scale)
    noise = stats.norm(scale=0.01)

    n_events_returned = create_random_data(filename, seconds=seconds, sample_rate=sample_rate,
                                           baseline=baseline, event_rate=event_rate,
                                           event_durations=event_duration,
                                           event_depths=event_depth, noise=noise)

    save_file_name = filename[:-len('.h5')] + 'Events.h5'
    if os.path.exists(save_file_name):
        os.remove(save_file_name)

    event_database = find_events([filename], save_file_names=[save_file_name])[0]

    ed = EventDatabase(event_database)

    count = ed.get_event_count()
    count_should_be = event_rate * seconds
    diff = abs(count - count_should_be)
    self.assertLessEqual(diff, 100,
                         "Unexpected number of events. "
                         "Expected {0}, was {1}.".format(count_should_be, count))

    table_sample_rate = ed.get_sample_rate()

    durations = [x['event_length'] / table_sample_rate for x in ed.get_event_table().iterrows()]
    depths = [x['current_blockage'] for x in ed.get_event_table().iterrows()]

    mean_duration = np.mean(durations)
    self.assertAlmostEqual(event_duration_loc, mean_duration, 5,
                           "Unexpected mean event duration. "
                           "Wanted {0}, got {1}.".format(event_duration_loc, mean_duration))

    mean_depth = np.mean(depths)
    self.assertAlmostEqual(event_depth_loc, mean_depth, 1,
                           "Unexpected mean event depth. "
                           "Wanted {0}, got {1}.".format(event_depth_loc, mean_depth))

    ed.close()
    os.remove(event_database)
def test_chimera_no_noise_1event_2levels(self):
    filename = os.path.dirname(os.path.realpath(__file__))
    filename = os.path.join(filename, 'testDataFiles', 'chimera_nonoise_1event_2levels.log')

    event_database = find_events([filename],
                                 save_file_names=['_testChimera_nonoise_1Event_2Levels_9238.h5'])[0]

    h5file = ed.open_file(event_database, mode='r')
    self._test_chimera_no_noise_1event_2levels_helper(h5file)
    h5file.close()

    # delete the newly created event file
    os.remove(event_database)
def test_chimera_no_noise_2events_1levels(self):
    filename = tf.get_abs_path('chimera_nonoise_2events_1levels.log')

    event_databases = find_events([filename],
                                  save_file_names=['_testChimera_nonoise_2events_1levels_9238.h5'])

    self.assertEqual(len(event_databases), 1)

    event_database = event_databases[0]

    h5file = ed.open_file(event_database, mode='r')
    self._test_chimera_no_noise_2events_1levels_wrapper(h5file)
    h5file.close()

    # delete the newly created event file
    os.remove(event_database)
def run(self):
    self.time1 = time.time()
    if os.name == 'posix':
        self._pipe, child_conn = Pipe()
        self.p = Process(target=find_events, args=(self.file_names,),
                         kwargs={'parameters': self.parameters, 'pipe': child_conn,
                                 'debug': self.debug, 'save_file_names': self.save_file_names})
        self.p.start()
        # child_conn needs to be closed in all processes before EOFError is thrown (on Linux),
        # so close it here immediately.
        child_conn.close()
        while True:
            time.sleep(0)
            try:
                data = self._pipe.recv()
                if 'status_text' in data:
                    self.status_text = data['status_text']
                if 'Events' in data:
                    self.events += data['Events']
            except:
                break
    else:
        # TODO add ability to listen for info from find_events on Windows.
        # On Windows we can only fork a process if __name__ == '__main__', which is not true
        # here (because AnalyzeDataThread is imported).
        # So on Windows, just use this thread; don't use an additional separate process.
        find_events(self.file_names, parameters=self.parameters, debug=self.debug,
                    save_file_names=self.save_file_names)
    self.cancelled = True
def test_saving_files(self):
    filename = tf.get_abs_path('chimera_1event.log')

    event_database = find_events([filename], save_file_names=['_testSavingFiles_9238.h5'])[0]

    self.assertTrue(os.path.isfile(event_database))

    h5file = ed.open_file(event_database, mode='r')
    self.assertTrue(h5file.isopen)
    h5file.close()

    # delete the newly created event file
    os.remove(event_database)
def test_saving_files(self):
    filename = os.path.dirname(os.path.realpath(__file__))
    filename = os.path.join(filename, "testDataFiles", "chimera_1event.log")

    event_database = find_events([filename], save_file_names=["_testSavingFiles_9238.h5"])[0]

    self.assertTrue(os.path.isfile(event_database))

    h5file = ed.open_file(event_database, mode="r")
    self.assertTrue(h5file.isopen)
    h5file.close()

    # delete the newly created event file
    os.remove(event_database)
def test_saving_files(self):
    filename = tf.get_abs_path('chimera_1event.log')

    event_database = find_events([filename], save_file_names=['_testSavingFiles_9238.h5'])[0]

    self.assertTrue(os.path.isfile(event_database))

    h5file = ed.open_file(event_database, mode='r')
    self.assertTrue(h5file.isopen)
    h5file.close()

    # delete the newly created event file
    os.remove(event_database)
def test_chimera_no_noise_2events_1levels(self):
    filename = tf.get_abs_path('chimera_nonoise_2events_1levels.log')

    event_databases = find_events([filename],
                                  save_file_names=['_testChimera_nonoise_2events_1levels_9238.h5'])

    self.assertEqual(len(event_databases), 1)

    event_database = event_databases[0]

    h5file = ed.open_file(event_database, mode='r')
    self._test_chimera_no_noise_2events_1levels_wrapper(h5file)
    h5file.close()

    # delete the newly created event file
    os.remove(event_database)
def test_passing_reader(self):
    """
    Tests that passing an open subtype of
    :py:class:`pypore.i_o.abstract_reader.AbstractReader` works.
    """
    filename = os.path.dirname(os.path.realpath(__file__))
    filename = os.path.join(filename, "testDataFiles", "chimera_nonoise_2events_1levels.log")

    reader = get_reader_from_filename(filename)
    data = [reader]

    event_databases = find_events(data, save_file_names=["_test_passing_reader.h5"])

    self.assertEqual(len(event_databases), 1)

    h5file = ed.open_file(event_databases[0], mode="r")
    self._test_chimera_no_noise_2events_1levels_wrapper(h5file)
    h5file.close()
    os.remove(event_databases[0])
def test_too_large_end_threshold(self, filename):
    """
    Tests that we don't find events when the ending threshold is too large.
    """
    data_file = tf.get_abs_path('chimera_1event.log')

    parameters = Parameters(threshold_strategy=AbsoluteChangeThresholdStrategy(2., 1000.))

    event_databases = find_events([data_file], parameters=parameters,
                                  save_file_names=[filename], debug=True)

    h5file = ed.open_file(filename, mode='r')

    event_count = h5file.get_event_count()
    self.assertEqual(event_count, 0,
                     "Unexpected event count. Should be {0}, was {1}.".format(0, event_count))

    h5file.close()
def test_multiple_files(self):
    filename1 = tf.get_abs_path('chimera_nonoise_2events_1levels.log')
    filename2 = tf.get_abs_path('chimera_nonoise_1event_2levels.log')
    file_names = [filename1, filename2]

    event_databases = find_events(file_names,
                                  save_file_names=['_testMultipleFiles_1_9238.h5',
                                                   '_testMultipleFiles_2_9238.h5'])

    self.assertEqual(len(event_databases), 2)

    h5file = ed.open_file(event_databases[0], mode='r')
    self._test_chimera_no_noise_2events_1levels_wrapper(h5file)
    h5file.close()
    os.remove(event_databases[0])

    h5file = ed.open_file(event_databases[1], mode='r')
    self._test_chimera_no_noise_1event_2levels_helper(h5file)
    h5file.close()
    os.remove(event_databases[1])
def test_passing_reader(self):
    """
    Tests that passing an open subtype of
    :py:class:`pypore.i_o.abstract_reader.AbstractReader` works.
    """
    filename = tf.get_abs_path('chimera_nonoise_2events_1levels.log')

    reader = get_reader_from_filename(filename)
    data = [reader]

    event_databases = find_events(data, save_file_names=['_test_passing_reader.h5'])

    self.assertEqual(len(event_databases), 1)

    h5file = ed.open_file(event_databases[0], mode='r')
    self._test_chimera_no_noise_2events_1levels_wrapper(h5file)
    h5file.close()
    os.remove(event_databases[0])
def test_multiple_files(self):
    filename1 = os.path.dirname(os.path.realpath(__file__))
    filename1 = os.path.join(filename1, "testDataFiles", "chimera_nonoise_2events_1levels.log")
    filename2 = os.path.dirname(os.path.realpath(__file__))
    filename2 = os.path.join(filename2, "testDataFiles", "chimera_nonoise_1event_2levels.log")
    file_names = [filename1, filename2]

    event_databases = find_events(file_names,
                                  save_file_names=["_testMultipleFiles_1_9238.h5",
                                                   "_testMultipleFiles_2_9238.h5"])

    self.assertEqual(len(event_databases), 2)

    h5file = ed.open_file(event_databases[0], mode="r")
    self._test_chimera_no_noise_2events_1levels_wrapper(h5file)
    h5file.close()
    os.remove(event_databases[0])

    h5file = ed.open_file(event_databases[1], mode="r")
    self._test_chimera_no_noise_1event_2levels_helper(h5file)
    h5file.close()
    os.remove(event_databases[1])
def test_chimera_no_noise_1event(self):
    filename = os.path.dirname(os.path.realpath(__file__))
    filename = os.path.join(filename, 'testDataFiles', 'chimera_nonoise_1event.log')

    event_database = find_events([filename],
                                 save_file_names=['_testChimera_nonoise_1Event_9238.h5'])[0]

    h5file = ed.open_file(event_database, mode='r')

    events = h5file.root.events

    # check event table correct length
    event_table = events.eventTable
    self.assertEqual(event_table.nrows, 1)

    # check raw data array correct length
    raw_data_matrix = events.raw_data
    self.assertEqual(raw_data_matrix.nrows, 1)
    event_length = event_table[0]['event_length']
    self.assertEqual(event_length, 1000)

    # Make sure only 1 event with 1 level
    levels_matrix = events.levels
    self.assertEqual(levels_matrix.nrows, 1)
    n_levels = event_table[0]['n_levels']
    self.assertEqual(n_levels, 1)
    levels = levels_matrix[0]
    self.assertAlmostEqual(levels[0], 15.13955, 3)

    # Check 1 event with 1 levelLength
    lengths_matrix = events.level_lengths
    self.assertEqual(lengths_matrix.nrows, 1)
    lengths = lengths_matrix[0]
    self.assertEqual(lengths[0], 1000)

    h5file.close()

    # delete the newly created event file
    os.remove(event_database)
def test_too_large_end_threshold(self, filename):
    """
    Tests that we don't find events when the ending threshold is too large.
    """
    data_file = tf.get_abs_path('chimera_1event.log')

    parameters = Parameters(threshold_strategy=AbsoluteChangeThresholdStrategy(2., 1000.))

    event_databases = find_events([data_file], parameters=parameters,
                                  save_file_names=[filename], debug=True)

    h5file = ed.open_file(filename, mode='r')

    event_count = h5file.get_event_count()
    self.assertEqual(event_count, 0,
                     "Unexpected event count. Should be {0}, was {1}.".format(0, event_count))

    h5file.close()
def test_chimera_no_noise_1event(self):
    filename = os.path.dirname(os.path.realpath(__file__))
    filename = os.path.join(filename, "testDataFiles", "chimera_nonoise_1event.log")

    event_database = find_events([filename],
                                 save_file_names=["_testChimera_nonoise_1Event_9238.h5"])[0]

    h5file = ed.open_file(event_database, mode="r")

    events = h5file.root.events

    # check event table correct length
    event_table = events.eventTable
    self.assertEqual(event_table.nrows, 1)

    # check raw data array correct length
    raw_data_matrix = events.raw_data
    self.assertEqual(raw_data_matrix.nrows, 1)
    event_length = event_table[0]["event_length"]
    self.assertEqual(event_length, 1000)

    # Make sure only 1 event with 1 level
    levels_matrix = events.levels
    self.assertEqual(levels_matrix.nrows, 1)
    n_levels = event_table[0]["n_levels"]
    self.assertEqual(n_levels, 1)
    levels = levels_matrix[0]
    self.assertAlmostEqual(levels[0], 15.13955, 3)

    # Check 1 event with 1 levelLength
    lengths_matrix = events.level_lengths
    self.assertEqual(lengths_matrix.nrows, 1)
    lengths = lengths_matrix[0]
    self.assertEqual(lengths[0], 1000)

    h5file.close()

    # delete the newly created event file
    os.remove(event_database)
def test_chimera_no_noise_1event(self):
    filename = tf.get_abs_path('chimera_nonoise_1event.log')

    event_database = find_events([filename],
                                 save_file_names=['_testChimera_nonoise_1Event_9238.h5'])[0]

    h5file = ed.open_file(event_database, mode='r')

    events = h5file.root.events

    # check event table correct length
    event_table = events.eventTable
    self.assertEqual(event_table.nrows, 1)

    # check raw data array correct length
    raw_data_matrix = events.raw_data
    self.assertEqual(raw_data_matrix.nrows, 1)
    event_length = event_table[0]['event_length']
    self.assertEqual(event_length, 1000)

    # Make sure only 1 event with 1 level
    levels_matrix = events.levels
    self.assertEqual(levels_matrix.nrows, 1)
    n_levels = event_table[0]['n_levels']
    self.assertEqual(n_levels, 1)
    levels = levels_matrix[0]
    self.assertAlmostEqual(levels[0], 15.13955, 3)

    # Check 1 event with 1 levelLength
    lengths_matrix = events.level_lengths
    self.assertEqual(lengths_matrix.nrows, 1)
    lengths = lengths_matrix[0]
    self.assertEqual(lengths[0], 1000)

    h5file.close()

    # delete the newly created event file
    os.remove(event_database)
def test_event_params(self, filename):
    """
    Tests that setting the event depth and duration give correct values.
    """
    seconds = 5.
    sample_rate = 1.e6
    baseline = 10.
    event_rate = 100.
    event_duration_loc = 1.e-4
    event_duration_scale = 5.e-6
    event_duration = stats.norm(loc=event_duration_loc, scale=event_duration_scale)
    event_depth_loc = -1.
    event_depth_scale = .05
    event_depth = stats.norm(loc=event_depth_loc, scale=event_depth_scale)
    noise = stats.norm(scale=0.01)

    n_events_returned = create_random_data(filename, seconds=seconds, sample_rate=sample_rate,
                                           baseline=baseline, event_rate=event_rate,
                                           event_durations=event_duration,
                                           event_depths=event_depth, noise=noise)

    save_file_name = filename[:-len('.h5')] + 'Events.h5'
    if os.path.exists(save_file_name):
        os.remove(save_file_name)

    event_database = find_events([filename], save_file_names=[save_file_name])[0]

    ed = EventDatabase(event_database)

    count = ed.get_event_count()
    count_should_be = event_rate * seconds
    diff = abs(count - count_should_be)
    self.assertLessEqual(diff, 100,
                         "Unexpected number of events. "
                         "Expected {0}, was {1}.".format(count_should_be, count))

    table_sample_rate = ed.get_sample_rate()

    durations = [x['event_length'] / table_sample_rate for x in ed.get_event_table().iterrows()]
    depths = [x['current_blockage'] for x in ed.get_event_table().iterrows()]

    mean_duration = np.mean(durations)
    self.assertAlmostEqual(event_duration_loc, mean_duration, 5,
                           "Unexpected mean event duration. "
                           "Wanted {0}, got {1}.".format(event_duration_loc, mean_duration))

    mean_depth = np.mean(depths)
    self.assertAlmostEqual(event_depth_loc, mean_depth, 1,
                           "Unexpected mean event depth. "
                           "Wanted {0}, got {1}.".format(event_depth_loc, mean_depth))

    ed.close()
    os.remove(event_database)