class ActigraphDBTest2(unittest.TestCase):
    @timing
    def setUp(self):
        np.set_printoptions(suppress=True)
        print(__file__ + ' Creating database')
        # Will read an already existing file
        self.db = DBManager('actigraph.db', False, False)

    @timing
    def tearDown(self):
        self.db.commit()

    def test_read_all_participant(self):
        participants = self.db.get_all_participants()
        print(participants)
        for participant in participants:
            print(participant, participant.group)

    def test_read_all_data(self):
        recordsets = self.db.get_all_recordsets()
        for record in recordsets:
            all_data = self.db.get_all_sensor_data(record, convert=True)
            for data in all_data:
                for time in data.to_time_series()['time']:
                    print('time', time)
                # Only inspect the first data entry
                break
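# Note: the @timing decorator used throughout these tests is defined elsewhere
# in the test package. A minimal sketch of such a decorator, assuming it only
# reports each call's wall-clock duration (illustrative, not the project's
# actual implementation):
import time
from functools import wraps


def timing(func):
    """Print how long the wrapped function took to execute."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        start = time.time()
        result = func(*args, **kwargs)
        print('%s took %.3f s' % (func.__name__, time.time() - start))
        return result
    return wrapper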
def test_loading(self):
    manager = DBManager('test.db', overwrite=True)
    participant = Participant(name='My Participant',
                              description='Participant Description')
    manager.update_participant(participant)

    # Import into the database
    importer = ActigraphImporter(manager, participant)
    results = importer.load('../../../resources/samples/test.gt3x')

    samples = 0
    for activity in results[1]['activity']:
        samples += 3 * len(activity[1])
    print('samples imported (all channels):', samples)

    importer.import_to_database(results)

    # Reload from the database
    manager = DBManager('test.db')
    recordsets = manager.get_all_recordsets()
    self.assertGreater(len(recordsets), 0)

    loaded_samples = 0
    for record in recordsets:
        # Get all sensors in the record
        sensors = manager.get_all_sensors()
        for sensor in sensors:
            if sensor.id_sensor_type == SensorType.ACCELEROMETER:
                channels = manager.get_all_channels(sensor=sensor)
                for channel in channels:
                    print('processing channel: ', channel)
                    self.assertEqual(channel.id_sensor, sensor.id_sensor,
                                     "id_sensor test for channel")
                    # Will get all data (converted to floats)
                    channel_data = manager.get_all_sensor_data(
                        recordset=record, convert=True, channel=channel)
                    print('channel_data_length', len(channel_data))
                    for sensor_data in channel_data:
                        self.assertEqual(sensor_data.id_channel,
                                         channel.id_channel,
                                         "id_channel test for data")
                        self.assertEqual(sensor_data.id_sensor,
                                         sensor.id_sensor,
                                         "id_sensor test for data")
                        loaded_samples += len(sensor_data.data)

    self.assertEqual(samples, loaded_samples)
def calculate(self, manager: DBManager, recordsets: list):
    results = []

    for record in recordsets:
        # Get all accelerometer sensors in the record
        sensors = manager.get_all_sensors(id_sensor_type=SensorType.ACCELEROMETER)
        for sensor in sensors:
            channels = manager.get_all_channels(sensor=sensor)
            samples_num = 0
            all_channels_data = {'Accelerometer_X': [],
                                 'Accelerometer_Y': [],
                                 'Accelerometer_Z': []}

            for channel_index, channel in enumerate(channels):
                # Will get all data (converted to floats)
                channel_data = manager.get_all_sensor_data(
                    recordset=record, convert=True, sensor=sensor,
                    channel=channel)

                if len(channel_data) > 0:
                    for data in channel_data:
                        all_channels_data[channel.label].append(data)
                        if channel_index == 0:
                            # Count the total number of samples only once,
                            # using the first channel
                            samples_num += len(data.data)

            if len(all_channels_data['Accelerometer_X']) > 0:
                # Process all sensor data
                result = {
                    'id_recordset': record.id_recordset,
                    'result_name': record.name + ' (' + sensor.location + '/'
                                   + sensor.name + ')',
                    'id_sensor': sensor.id_sensor,
                    'result': self.do_calculation(all_channels_data,
                                                  sensor.sampling_rate,
                                                  samples_num)
                }
                results.append(result)

    # Return an array with results for each recordset
    return results
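# do_calculation() is an abstract step implemented by each concrete algorithm
# and is not shown in this excerpt. Below is a minimal sketch of what an
# override could look like; the class name (VectorMagnitude) and the returned
# dictionary layout are illustrative assumptions, not part of the library.
import numpy as np


class VectorMagnitude:
    """Hypothetical algorithm returning the mean accelerometer vector magnitude."""

    def do_calculation(self, all_channels_data: dict, sampling_rate: float,
                       samples_num: int) -> dict:
        # Flatten every SensorData chunk of each axis into a single array
        x = np.concatenate([d.data for d in all_channels_data['Accelerometer_X']])
        y = np.concatenate([d.data for d in all_channels_data['Accelerometer_Y']])
        z = np.concatenate([d.data for d in all_channels_data['Accelerometer_Z']])
        # Euclidean norm of each (x, y, z) sample
        magnitude = np.sqrt(x ** 2 + y ** 2 + z ** 2)
        return {'mean_magnitude': float(np.mean(magnitude)),
                'duration_s': samples_num / sampling_rate}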
def calculate(self, manager: DBManager, recordsets: list):
    print('FreedsonAdult1998.calculate')
    print('Using recordsets', recordsets)

    results = []

    for record in recordsets:
        # Get all accelerometer sensors in the record
        sensors = manager.get_all_sensors(id_sensor_type=SensorType.ACCELEROMETER)
        for sensor in sensors:
            channels = manager.get_all_channels(sensor=sensor)
            for channel in channels:
                if channel.label == 'Accelerometer_Y':
                    # Will get all data (converted to floats)
                    channel_data = manager.get_all_sensor_data(
                        recordset=record, convert=True, sensor=sensor,
                        channel=channel)

                    if len(channel_data) > 0:
                        # Process all sensor data
                        result = {
                            'id_recordset': record.id_recordset,
                            'result_name': record.name + ' (' + sensor.location
                                           + '/' + sensor.name + ')',
                            'id_sensor': sensor.id_sensor,
                            'result': freedson_adult_1998(channel_data,
                                                          sensor.sampling_rate)
                        }
                        results.append(result)

    # Return an array with results for each recordset
    return results
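# freedson_adult_1998() is imported from the library and its body is not shown
# here. As a rough, illustrative sketch of the underlying method (Freedson,
# Melanson & Sirard, 1998), the helper below maps activity counts per minute
# to MET estimates with the published regression; the function name and return
# format are assumptions, not the library's implementation.
def freedson_mets_sketch(counts_per_minute):
    """Estimate METs from activity counts per minute (Freedson et al., 1998)."""
    # Published regression: METs = 1.439008 + 0.000795 * counts/min
    return [1.439008 + 0.000795 * cpm for cpm in counts_per_minute]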
if not os.path.isfile(db_filename):
    print('importing actigraph data')
    import_data()

manager = DBManager(db_filename)

# Get the recordsets
recordsets = manager.get_all_recordsets()

for record in recordsets:
    # Get all sensors in the record
    sensors = manager.get_all_sensors()
    for sensor in sensors:
        if sensor.id_sensor_type == SensorType.ACCELEROMETER:
            print('Found Accelerometer')
            channels = manager.get_all_channels(sensor=sensor)
            for channel in channels:
                if channel.label == 'Accelerometer_Y':
                    print('Processing Channel :', channel)
                    # Will get all data (converted to floats)
                    channel_data = manager.get_all_sensor_data(
                        recordset=record, convert=True, sensor=sensor,
                        channel=channel)
                    # Process all sensor data
                    results = freedson_adult_1998(channel_data,
                                                  sensor.sampling_rate)
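# import_data() is defined elsewhere in this script. Based on the
# ActigraphImporter usage shown in test_loading() above, a sketch of what it
# could do is given below; the sample path and participant fields are
# placeholders, not the script's actual values.
def import_data():
    manager = DBManager(db_filename, overwrite=True)
    participant = Participant(name='My Participant',
                              description='Participant Description')
    manager.update_participant(participant)
    importer = ActigraphImporter(manager, participant)
    results = importer.load('../resources/samples/test.gt3x')
    importer.import_to_database(results)
    manager.commit()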
def test_get_all_sensor_data_with_args(self):
    manager = DBManager(filename='openimu.db', overwrite=True, echo=False)

    # Create the sensors in the DB
    group = manager.update_group(
        Group(name='Group Name', description='Group Description'))
    participant = manager.update_participant(
        Participant(name='Participant Name',
                    description='Participant Description',
                    group=group))
    sensor = manager.add_sensor(SensorType.ACCELEROMETER, 'Sensor Name',
                                'Hardware Name', 'Wrist', 30.0, 1)
    sensor2 = manager.add_sensor(SensorType.GYROMETER, 'Sensor Name',
                                 'Hardware Name', 'Wrist', 30.0, 1)
    channel1 = manager.add_channel(sensor, Units.GRAVITY_G,
                                   DataFormat.FLOAT32, 'Accelerometer_X')
    channel2 = manager.add_channel(sensor, Units.GRAVITY_G,
                                   DataFormat.FLOAT32, 'Accelerometer_Y')

    timestamps = SensorTimestamps()
    timestamps.timestamps = np.zeros(40, dtype=np.float64)
    # Will set the start and end timestamps
    timestamps.update_timestamps()

    recordset = manager.add_recordset(participant, 'My Record',
                                      timestamps.start_timestamp,
                                      timestamps.end_timestamp)

    data = np.zeros(40, dtype=np.float32)
    manager.add_sensor_data(recordset, sensor, channel1, timestamps, data)
    manager.add_sensor_data(recordset, sensor, channel2, timestamps, data)
    manager.commit()

    # Test with no args: returns everything in the recordset
    sensordata_res = manager.get_all_sensor_data(recordset=recordset, convert=True)
    self.assertEqual(len(sensordata_res), 2)
    for sensor_data in sensordata_res:
        self.assertEqual(len(sensor_data.data), len(data))

    # Test with a valid sensor arg
    sensordata_res = manager.get_all_sensor_data(recordset=recordset,
                                                 convert=True, sensor=sensor)
    self.assertEqual(len(sensordata_res), 2)
    for sensor_data in sensordata_res:
        self.assertEqual(len(sensor_data.data), len(data))

    # Test with the wrong sensor arg
    sensordata_res = manager.get_all_sensor_data(recordset=recordset,
                                                 convert=True, sensor=sensor2)
    self.assertEqual(len(sensordata_res), 0)

    # Test with an invalid sensor arg
    sensordata_res = manager.get_all_sensor_data(recordset=recordset,
                                                 convert=True, sensor=Sensor())
    self.assertEqual(len(sensordata_res), 0)

    # Test with channel1
    sensordata_res = manager.get_all_sensor_data(recordset=recordset,
                                                 convert=True, channel=channel1)
    self.assertEqual(len(sensordata_res), 1)
    for sensor_data in sensordata_res:
        self.assertEqual(len(sensor_data.data), len(data))

    # Test with channel2
    sensordata_res = manager.get_all_sensor_data(recordset=recordset,
                                                 convert=True, channel=channel2)
    self.assertEqual(len(sensordata_res), 1)
    for sensor_data in sensordata_res:
        self.assertEqual(len(sensor_data.data), len(data))

    # Test with an invalid channel
    sensordata_res = manager.get_all_sensor_data(recordset=recordset,
                                                 convert=True, channel=Channel())
    self.assertEqual(len(sensordata_res), 0)

    manager.close()
class ActigraphDBTest1(unittest.TestCase):
    @timing
    def setUp(self):
        np.set_printoptions(suppress=True)
        print(__file__ + ' Creating database')
        self.db = DBManager('actigraph.db', True, False)
        self.group = self.db.add_group('MyGroup', 'MyDescription')
        self.participant = self.db.add_participant(group=self.group,
                                                   name='Anonymous',
                                                   description='Participant')

    @timing
    def tearDown(self):
        self.db.commit()

    @timing
    def load_file(self, name='../resources/samples/test.gt3x'):
        print('loading file :', name)
        result = gt3x_importer(name)
        self.assertEqual(len(result), 2)
        return result

    def add_recordset_to_db(self, name, start_timestamp, stop_timestamp):
        recordset = self.db.add_recordset(self.participant, name,
                                          start_timestamp, stop_timestamp)
        return recordset

    def add_sensor_to_db(self, sensor_type, name, hw_name, location,
                         sampling_rate, data_rate):
        sensor = self.db.add_sensor(sensor_type, name, hw_name, location,
                                    sampling_rate, data_rate)
        return sensor

    def add_channel_to_db(self, sensor, unit, data_format, label):
        channel = self.db.add_channel(sensor, unit, data_format, label)
        return channel

    def add_sensor_data_to_db(self, recordset, sensor, channel, timestamp, data):
        sensor_data = self.db.add_sensor_data(recordset, sensor, channel,
                                              timestamp, data)
        return sensor_data

    @timing
    def commit(self):
        return self.db.commit()

    @timing
    def flush(self):
        return self.db.flush()

    def test_import(self):
        """
        The importer returns the file info and the data contents:

        [info, {'activity': activity_data,
                'battery': battery_data,
                'lux': lux_data,
                'event': event_data,
                'parameters': parameters_data,
                'metadata': metadata_data}]
        """
        [info, data] = self.load_file()
        self.assertTrue(len(info) > 0)
        self.assertTrue(len(data) == 6)
        print(info)

        # Create the recordset
        start = int(info['Start Date'])
        stop = int(info['Last Sample Time'])
        print(start, stop)
        start_timestamp = ticksconverter(start)
        end_timestamp = ticksconverter(stop)
        print(start_timestamp, end_timestamp)

        recordset = self.add_recordset_to_db(info['Subject Name'],
                                             start_timestamp, end_timestamp)
        print(recordset)

        if 'activity' in data:
            print('activity found')

            # Create the sensor
            accelerometer_sensor = self.add_sensor_to_db(
                SensorType.ACCELEROMETER, 'Accelerometer', info['Device Type'],
                'Unknown', info['Sample Rate'], 1)

            # Create the channels
            accelerometer_channels = [
                self.add_channel_to_db(accelerometer_sensor, Units.GRAVITY_G,
                                       DataFormat.FLOAT32, 'Accelerometer_Y'),
                self.add_channel_to_db(accelerometer_sensor, Units.GRAVITY_G,
                                       DataFormat.FLOAT32, 'Accelerometer_X'),
                self.add_channel_to_db(accelerometer_sensor, Units.GRAVITY_G,
                                       DataFormat.FLOAT32, 'Accelerometer_Z')
            ]

            # Should be 1970 (the epoch)
            last_timestamp = 0
            all_timestamps = []
            value_dict = {}

            # Import the data
            for epoch in data['activity']:
                # An epoch contains a timestamp and an array with acc_x, acc_y, acc_z
                self.assertEqual(len(epoch), 2)
                current_timestamp = epoch[0]

                # Start a new array when timestamps are not consecutive
                create_array = current_timestamp != (last_timestamp + 1)

                # Do not allow more than one hour of consecutive data
                if create_array is not True:
                    if current_timestamp - all_timestamps[-1] >= 3600:
                        create_array = True
                # Consecutive timestamps?
                if create_array is True:
                    all_timestamps.append(current_timestamp)
                    # Create a list of values per channel for this timestamp
                    value_dict[current_timestamp] = [list(), list(), list()]

                # Get the data
                samples = epoch[1]

                # Separate write for each channel
                for index in range(0, len(accelerometer_channels)):
                    # Use the last timestamp to append the data
                    value_dict[all_timestamps[-1]][index].append(samples[:, index])

                # Update the timestamp
                last_timestamp = current_timestamp

            # Insert into the DB as chunks of data
            for timestamp in all_timestamps:
                for index in range(0, len(value_dict[timestamp])):
                    vector = np.concatenate(value_dict[timestamp][index])
                    if len(vector) > 0:
                        self.add_sensor_data_to_db(
                            recordset, accelerometer_sensor,
                            accelerometer_channels[index],
                            datetime.datetime.fromtimestamp(timestamp), vector)

            # Flush to the DB
            self.flush()

        if 'battery' in data:
            print('battery found')

            # Create the sensor
            volt_sensor = self.add_sensor_to_db(SensorType.BATTERY, 'Battery',
                                                info['Device Type'], 'Unknown',
                                                0, 1)

            # Create the channel
            volt_channel = self.add_channel_to_db(volt_sensor, Units.VOLTS,
                                                  DataFormat.FLOAT32, 'Battery')

            for epoch in data['battery']:
                timestamp = datetime.datetime.fromtimestamp(epoch[0])
                value = np.float32(epoch[1])
                self.assertEqual(len(value.tobytes()), 4)
                self.add_sensor_data_to_db(recordset, volt_sensor, volt_channel,
                                           timestamp, value)

            # Flush to the DB (RAM)
            self.flush()

        if 'lux' in data:
            print('lux found')

            # Create the sensor
            lux_sensor = self.add_sensor_to_db(SensorType.LUX, 'Lux',
                                               info['Device Type'], 'Unknown',
                                               1, 1)

            # Create the channel
            lux_channel = self.add_channel_to_db(lux_sensor, Units.LUX,
                                                 DataFormat.FLOAT32, 'Lux')

            for epoch in data['lux']:
                timestamp = datetime.datetime.fromtimestamp(epoch[0])
                value = np.float32(epoch[1])
                self.assertEqual(len(value.tobytes()), 4)
                self.add_sensor_data_to_db(recordset, lux_sensor, lux_channel,
                                           timestamp, value)

            # Flush to the DB (RAM)
            self.flush()

        # Write the data to file
        self.commit()

    @timing
    def read_back_data(self, participant):
        recordsets = self.db.get_all_recordsets(participant)
        print(recordsets)
        for record in recordsets:
            alldata = self.db.get_all_sensor_data(record)
            print('recordset size', len(alldata))
            for data in alldata:
                print('type', type(data.data))
                break

    def test_reload_from_db(self):
        # Import the data first
        self.test_import()
        self.read_back_data(self.participant)