def create_test_data_vector(values_list: list):
    data_vector = DataVector()
    for values in values_list:
        point = DataPoint()
        for dimension_value in values:
            point.add_dimension(dimension_value)
        data_vector.add_point(point)
    return data_vector
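# A minimal usage sketch for the helper above, assuming the DataVector /
# DataPoint API it already relies on: each inner list becomes one DataPoint
# with one dimension per value.
vector = create_test_data_vector([[1, 2], [3, 4]])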
def recalculate_centers(self):
    """
    Recalculate the cluster centers based on the current clusters.
    """
    for i, cluster in enumerate(self.clusters):
        new_center = DataPoint()
        for point in cluster:
            new_center.add_data_counts(point)
        new_center.divide_by_constant(len(cluster))
        self.centers[i] = new_center
def parse_data(file):
    sensor_data = []
    true_data = []
    with open(file, 'r') as dat:
        lines = dat.readlines()
        for line in lines:
            x = line.split()
            if x[0] == 'L':
                sensor_data_point = DataPoint({
                    'timestamp': int(x[3]),
                    'name': 'lidar',
                    'x': float(x[1]),
                    'y': float(x[2])
                })
                true_data_point = DataPoint({
                    'timestamp': int(x[3]),
                    'name': 'state',
                    'x': float(x[4]),
                    'y': float(x[5]),
                    'vx': float(x[6]),
                    'vy': float(x[7])
                })
            elif x[0] == 'R':
                sensor_data_point = DataPoint({
                    'timestamp': int(x[4]),
                    'name': 'radar',
                    'rho': float(x[1]),
                    'phi': float(x[2]),
                    'drho': float(x[3])
                })
                true_data_point = DataPoint({
                    'timestamp': int(x[4]),
                    'name': 'state',
                    'x': float(x[5]),
                    'y': float(x[6]),
                    'vx': float(x[7]),
                    'vy': float(x[8])
                })
            sensor_data.append(sensor_data_point)
            true_data.append(true_data_point)
    return sensor_data, true_data
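# A minimal usage sketch; 'measurements.txt' is a hypothetical file name
# standing in for any log in the L/R line format parsed above, and
# get_timestamp() is the accessor used elsewhere in this collection.
sensor_data, true_data = parse_data('measurements.txt')
for measured, actual in zip(sensor_data, true_data):
    print(measured.get_timestamp(), actual.get_timestamp())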
def empty_bb(self):
    return BoundBox('empty', [
        DataPoint(0, 0, 0), DataPoint(0, 0, 0), DataPoint(0, 0, 0),
        DataPoint(0, 0, 0), DataPoint(0, 0, 0), DataPoint(0, 0, 0),
        DataPoint(0, 0, 0), DataPoint(0, 0, 0), DataPoint(0, 0, 0)
    ])
def send_to_thingspeak(datapoints):
    mean_data = DataPoint.mean(datapoints)
    thingspeak_data = mean_data.to_thingspeak()
    print('sending data\n{}'.format(thingspeak_data))
    success = False
    number_of_retries = 3
    while not success and number_of_retries > 0:
        try:
            client_id = binascii.hexlify(machine.unique_id())
            client = MQTTClient(client_id,
                                'mqtt.thingspeak.com',
                                user='******',
                                password=MQTT_API_KEY,
                                port=8883,
                                ssl=True)
            client.connect()
            client.publish(
                topic='channels/379710/publish/{}'.format(MQTT_WRITE_API_KEY),
                msg=thingspeak_data)
            client.disconnect()
            success = True
        except OSError as e:
            print('network error: {}'.format(e.errno))
            number_of_retries -= 1
    return success
def test_add_point_nonempty_vector_too_small_dimension(self):
    vector = self.create_test_data_vector([[0, 0]])
    point = DataPoint()
    point.coordinates.append(3)
    self.assertRaises(ValueError, vector.add_point, point)
def setUp(self) -> None:
    time_ms = 1881258.816042923
    class_label = "normal"
    mean_id_interval = 0.011034680628272332
    variance_id_frequency = 17.72437499999999
    mean_id_intervals_variance = 8.354867823557838e-05
    mean_data_bit_count = 14.913419913419913
    variance_data_bit_count = 74.46869436479822
    kurtosis_id_interval = 74.39655141184994
    kurtosis_id_frequency = 1.323601387295182
    skewness_id_frequency = 0.5522522478213545
    kurtosis_variance_data_bit_count_id = 15.457159141707956
    skewness_id_interval_variances = 0.694951701467612

    self.data_point_row = [
        time_ms, class_label, mean_id_interval, variance_id_frequency,
        mean_id_intervals_variance, mean_data_bit_count,
        variance_data_bit_count, kurtosis_id_interval, kurtosis_id_frequency,
        skewness_id_frequency, kurtosis_variance_data_bit_count_id,
        skewness_id_interval_variances
    ]

    self.data_point_object = DataPoint(
        time_ms, class_label, mean_id_interval, variance_id_frequency,
        mean_id_intervals_variance, mean_data_bit_count,
        variance_data_bit_count, kurtosis_id_interval, kurtosis_id_frequency,
        skewness_id_frequency, kurtosis_variance_data_bit_count_id,
        skewness_id_interval_variances)

    self.header = datapoint.datapoint_attributes
def load_data(filename):
    print('[1] Loading Data ....')
    book = xlrd.open_workbook(filename)
    input_sheet = book.sheet_by_name("input")
    output_sheet = book.sheet_by_name("A_F")
    num_rows = input_sheet.nrows
    num_cols = input_sheet.ncols
    col_values = []
    for col in range(1, num_cols):
        col_values.append(input_sheet.col_values(col)[1:])
    x = np.array(col_values, dtype='|S4')
    y = x.astype(float)
    maxs = [max(y[col]) for col in range(0, num_cols - 1)]
    mins = [min(y[col]) for col in range(0, num_cols - 1)]
    data_points = []
    for row in range(1, num_rows):
        values = []
        for col in range(1, num_cols):
            # min-max normalise each value into [0, 1]
            values.append(
                (float(input_sheet.cell(row, col).value) - mins[col - 1]) /
                (maxs[col - 1] - mins[col - 1]))
        d = DataPoint(values, int(output_sheet.cell(row, 0).value))
        data_points.append(d)
    print(num_rows - 1, " points with dimension=", num_cols - 1, " are added")
    return data_points
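# A self-contained sketch of the min-max scaling load_data applies per
# column: each value is mapped to (v - min) / (max - min), landing in [0, 1].
def min_max_scale(values):
    lo, hi = min(values), max(values)
    return [(v - lo) / (hi - lo) for v in values]

assert min_max_scale([2.0, 4.0, 6.0]) == [0.0, 0.5, 1.0]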
def run(sensor_data, kalmanFilter):
    state_estimations = []
    first_time = True  # the filter must be initialised with the first measurement
    for data in sensor_data:
        if first_time:
            timestamp = start(data, kalmanFilter)
            first_time = False
        else:
            timestamp = kalman(data, kalmanFilter, timestamp)
        x = kalmanFilter.x
        px, py, vx, vy = x[0, 0], x[1, 0], x[2, 0], x[3, 0]
        g = {
            'timestamp': data.get_timestamp(),
            'name': 'state',
            'x': px,
            'y': py,
            'vx': vx,
            'vy': vy
        }
        state_estimation = DataPoint(g)
        state_estimations.append(state_estimation)
    return state_estimations
def send_datapoints_adhoc():
    sent = False
    queue_size = __queue_size()
    if queue_size > 0:
        try:
            datapoints = []
            # note: only the first queued line is read and sent here
            with open(__filename(), 'r') as data_file:
                try:
                    json_dict = ujson.loads(data_file.readline().rstrip())
                    datapoints.append(DataPoint(**json_dict))
                except ValueError:
                    pass
            if __send_data(datapoints):
                cleanup()
                flash_led(0x008800, 10)
                sent = True
            else:
                flash_led(0x880000, 10)
        except OSError as e:
            print('file access error: {}'.format(e.errno))
            flash_led(0x880000, 5)
    else:
        # no data to send
        flash_led(0x000088, 1)
    return sent
def get_state_estimations(EKF, all_sensor_data):
    """
    Calculates all state estimations given a FusionEKF instance
    and sensor measurements

    Args:
        EKF - an instance of a FusionEKF() class
        all_sensor_data - a list of sensor measurements as DataPoint() instances

    Returns:
        all_state_estimations
        - a list of all state estimations as predicted by the EKF instance
        - each state estimation is wrapped in a DataPoint() instance
    """
    all_state_estimations = []
    for data in all_sensor_data:
        EKF.process(data)
        x = EKF.get()
        px, py, vx, vy = x[0, 0], x[1, 0], x[2, 0], x[3, 0]
        g = {
            'timestamp': data.get_timestamp(),
            'name': 'state',
            'x': px,
            'y': py,
            'vx': vx,
            'vy': vy
        }
        state_estimation = DataPoint(g)
        all_state_estimations.append(state_estimation)
    return all_state_estimations
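# A minimal usage sketch, assuming a FusionEKF class and a parse_data helper
# like the ones elsewhere in this collection; the constructor arguments and
# file name are hypothetical.
ekf = FusionEKF()
all_sensor_data, all_ground_truths = parse_data('measurements.txt')
all_state_estimations = get_state_estimations(ekf, all_sensor_data)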
def addDataPoint(self, name, dataPointSource=None):
    # validate before logging, so a None name raises ValueError rather than
    # failing on string concatenation
    if name is None or name == "":
        raise ValueError("DataPoint name cannot be none or empty")
    logging.info("Add DataPoint: " + name + " to DataSet: " + self.name)
    dp = DataPoint(self, name)
    if dataPointSource is not None:
        dp.setDataPointSource(dataPointSource)
    self.dataPoints[name] = dp
    logging.info("Added DataPoint: " + name + " to DataSet: " + self.name)
    return dp
def getClusterCentroids(self):
    centroids = [[DataPoint(self.d), 0] for i in range(self.k)]
    for point in self.dataPoints:
        closestCluster = min([(self.means[i].distanceTo(point), i)
                              for i in range(self.k)])[1]
        point.setCluster(self.means[closestCluster])
        centroids[closestCluster][0].addVector(point)
        centroids[closestCluster][1] += 1
    return centroids
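# A sketch of how the [sum, count] pairs above could drive a k-means update;
# divideBy is a hypothetical DataPoint method (not shown in this collection)
# that scales every coordinate by 1 / count.
def updateMeans(self):
    for i, (total, count) in enumerate(self.getClusterCentroids()):
        if count > 0:
            self.means[i] = total.divideBy(count)  # hypothetical: mean = sum / count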
def readfile(fhd, kval):
    dataArray = []
    for oneline in fhd:
        # assign each data point to a cluster randomly
        clu = random.randint(0, kval - 1)
        data = list(map(int, oneline.split()))
        dp = DataPoint(data, clu)
        dataArray.append(dp)
    return dataArray
def test_datasets_exist(self):

    # The tests.
    self.assertEqual(len(self.datasets), 10)

    # Loop through the datasets.
    for data_point_dir_name in sorted(self.datasets):

        ## The data point wrapper class object instance.
        dp = DataPoint(data_point_dir_name, "../tmp")

        input_path = "data/sr/" + dp.get_input_path()

        lg.info(" * Testing '%s'" % (input_path))

        # Does the input directory exist?
        self.assertEqual(os.path.isdir(input_path), True)

        ## Path to the metadata file.
        metadata_path = (input_path + "/metadata.json").replace("//", "/")
        # lg.info(" * Testing '%s'..." % (metadata_path))
        self.assertEqual(os.path.exists(metadata_path), True)

        ## Path to the pixel mask file.
        pixel_mask_path = (input_path + "/masked_pixels.txt").replace("//", "/")
        # lg.info(" * Testing '%s'..." % (pixel_mask_path))
        self.assertEqual(os.path.exists(pixel_mask_path), True)

        ## Path to the frames information JSON.
        frames_path = (input_path + "/frames.json").replace("//", "/")
        # lg.info(" * Testing '%s'..." % (frames_path))
        self.assertEqual(os.path.exists(frames_path), True)

        ## Path to the clusters information JSON.
        klusters_path = (input_path + "/klusters.json").replace("//", "/")
        # lg.info(" * Testing '%s'..." % (klusters_path))
        self.assertEqual(os.path.exists(klusters_path), True)

        lg.info(" *")
def convert(filename):
    """
    Converts file to a list of data points
    :param filename:
    :return:
    """
    # a with-block closes the file; the list comprehension replaces the
    # Python-2-only tuple-unpacking lambda
    with open(filename, 'rb') as pkl_file:
        data = cPickle.load(pkl_file)
    matrix = csr_matrix(data).todense()
    return [DataPoint(x, i) for i, x in enumerate(matrix)]
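# A minimal usage sketch; 'features.pkl' is a hypothetical file assumed to
# hold a pickled matrix that scipy's csr_matrix can wrap.
points = convert('features.pkl')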
def __init__(self, k, inputFile):
    self.dataPoints = []
    if verbose:
        print(inputFile)
    with open(inputFile, 'r') as f:
        for line in f:
            val = eval(line)
            self.d = len(val)
            self.dataPoints.append(DataPoint(len(val), val))
    self.k = k
    self.means = self.dataPoints[:k]
@classmethod
def from_values(cls, values: List[float], associated_attributes: List[str],
                classlabel: str) -> 'Sample':
    datapoints: List[DataPoint] = list()
    for i in range(0, len(values)):
        datapoints.append(
            DataPoint(values[i], associated_attributes[i], classlabel))
    return cls(datapoints,
               associated_attributes=associated_attributes,
               classlabel=classlabel)
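# A minimal usage sketch, assuming Sample exposes from_values as a
# classmethod; the attribute names and label are made up for illustration.
sample = Sample.from_values([1.0, 2.5], ['height', 'weight'], 'normal')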
def handle(self):
    line = str(self.rfile.readline().strip(), "utf8")
    logging.debug(line)
    fields = line.split()
    logging.debug(fields)
    sensorid, channel = fields[0].split(".")
    value = float(fields[1])
    timestamp = int(fields[2])
    dp = DataPoint(sensorid=sensorid,
                   channel=channel,
                   value=value,
                   timestamp=datetime.datetime.fromtimestamp(
                       timestamp, tz=datetime.timezone.utc))
    logging.debug(dp)
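# The wire format the handler above expects, reconstructed from its parsing
# code: "<sensorid>.<channel> <value> <unix timestamp>", e.g. (hypothetical):
#
#   livingroom.temperature 21.5 1620000000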
def load(datafile):
    ct = 0
    with open(datafile, 'r') as fp:
        for line in fp:
            ct += 1
            item = [validate(f.strip(), fmt)
                    for f, fmt in zip(line.split(","), dataformat)]
            data.append(item)
            dp = DataPoint(ct, item[0:-1], item[-1])
            # every tenth line (the 1st, 11th, ...) goes to the test set
            if ct % 10 == 1:
                testset.add(dp)
            else:
                trainset.add(dp)
    count = ct
    print("data points read: {}".format(count))
    print("data points in trainset: {}".format(trainset.count))
    print("data points in testset: {}".format(testset.count))
def lidar_data(data):
    lidar_time = rospy.get_time()
    d = {
        'timestamp': lidar_time,
        'name': 'lidar',
        'x': data.pose.pose.position.x,
        'y': data.pose.pose.position.y,
        'z': data.pose.pose.position.z,
        'qx': data.pose.pose.orientation.x,
        'qy': data.pose.pose.orientation.y,
        'qz': data.pose.pose.orientation.z,
        'qw': data.pose.pose.orientation.w
    }
    d_lidar = DataPoint(d)
    state_estimate(EKF, d_lidar)
def imu_data(data):
    imu_time = rospy.get_time()
    d = {
        'timestamp': imu_time,
        'name': 'imu',
        'ax': data.linear_acceleration.x,
        'ay': data.linear_acceleration.y,
        'az': data.linear_acceleration.z,
        'qx': data.orientation.x,
        'qy': data.orientation.y,
        'qz': data.orientation.z,
        'qw': data.orientation.w
    }
    d_imu = DataPoint(d)
    state_estimate(EKF, d_imu)
def store_datapoint(datapoint):
    sent = False
    queue_size = __queue_size()
    print('queue size: {}'.format(queue_size))
    if queue_size >= __max_queue_size:
        datapoints = []
        try:
            with open(__filename(), 'r') as data_file:
                for line in data_file:
                    try:
                        json_dict = ujson.loads(line.rstrip())
                        datapoints.append(DataPoint(**json_dict))
                    except ValueError as e:
                        print('error while reading data from flash: {}'.format(
                            e.errno))
            datapoints.append(datapoint)
            if __send_datapoints(datapoints):
                flash_led(0x008800, 3)
                cleanup()
                sent = True
            else:
                flash_led(0x880000, 3)
                with open(__filename(), 'w') as data_file:
                    __save_datapoints_to_file([datapoints])
                pycom.nvs_set('queue_size', queue_size + 1)
        except OSError as e:
            print('file access error: {}'.format(e.errno))
            cleanup()
            __save_datapoints_to_file([datapoint])
            flash_led(0x888888)
            pycom.nvs_set('queue_size', 1)
    else:
        __save_datapoints_to_file([datapoint])
        flash_led(0x888888)
        pycom.nvs_set('queue_size', queue_size + 1)
    return sent
def setUp(self) -> None:
    time_ms = 1881258.816042923
    class_label = "normal"
    mean_id_interval = 0.011034680628272332
    variance_id_frequency = 17.72437499999999
    mean_id_intervals_variance = 8.354867823557838e-05
    mean_data_bit_count = 14.913419913419913
    variance_data_bit_count = 74.46869436479822
    kurtosis_id_interval = 74.39655141184994
    kurtosis_id_frequency = 1.323601387295182
    skewness_id_frequency = 0.5522522478213545
    kurtosis_variance_data_bit_count_id = 15.457159141707956
    skewness_id_interval_variances = 0.694951701467612

    self.message_expected = Message(0.000224, 809, 0, 8,
                                    bytearray(b'\x07\xa7\x7f\x8c\x11/\x00\x10'))
    self.datapoint_expected = DataPoint(
        time_ms, class_label, mean_id_interval, variance_id_frequency,
        mean_id_intervals_variance, mean_data_bit_count,
        variance_data_bit_count, kurtosis_id_interval, kurtosis_id_frequency,
        skewness_id_frequency, kurtosis_variance_data_bit_count_id,
        skewness_id_interval_variances)
def on_message(client, userdata, msg):
    logging.debug("Received " + msg.topic + " " + str(msg.payload))
    try:
        structured = json.loads(str(msg.payload, "utf8"))
        logging.debug("Decoded: " + pformat(structured))
        timestamp = time.time()
        if "signature" in structured:
            logging.debug("Found signature: " + repr(structured["signature"]))
            # TODO: check signature
            del structured["signature"]
        elif "token" in structured:
            logging.debug("Found token: " + repr(structured["token"]))
            # TODO: check token
            del structured["token"]
        if "timestamp_ms" in structured:
            timestamp = structured["timestamp_ms"] / 1000
        elif "timestamp" in structured:
            timestamp = structured["timestamp"]
        for key, value in structured.items():
            try:
                sensorid = key.split(".")[0]
                if sensorid != "timestamp" and sensorid != "timestamp_ms":
                    channel = key.split(".")[1]
                    dp = DataPoint(sensorid=sensorid,
                                   channel=channel,
                                   value=value,
                                   timestamp=datetime.datetime.fromtimestamp(
                                       timestamp, tz=datetime.timezone.utc))
                    logging.debug("Got data point: " + str(dp))
                    db.store_point(dp)
            except IndexError as e:
                logging.warning("Failed to extract channel: " + str(e))
    except json.JSONDecodeError as e:
        logging.warning("Error decoding payload: " + str(e))
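# A hypothetical payload on_message would accept, reconstructed from its
# parsing code: keys of the form "<sensorid>.<channel>", an optional
# timestamp, and an optional (currently unchecked) token or signature:
#
#   {"livingroom.temperature": 21.5, "timestamp_ms": 1620000000000}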
async def doer():
    datas = []
    await asyncio.sleep(2)
    print('starting')
    card = await cardreader.getSerial()
    print(card)
    try:
        lastblock = await cardreader.read(0)
        lastblock = int(lastblock)
        print(lastblock)
        for block in range(lastblock):
            try:
                data = DataPoint(card, await cardreader.read(block))
                print(data)
                if data.date:
                    datas.append(data)
            except Exception as ex:
                print(ex)
        cardreader.running = False
    except:
        print('please place a card on the reader')
    print(datas)
    quit()
def gps_data(data):
    x = data.latitude
    y = data.longitude
    co_or = utm.from_latlon(x, y)
    px = co_or[0] - utmOffset[0]
    py = co_or[1] - utmOffset[1]
    gps_time = rospy.get_time()
    p_measured = np.array([px, py, 0])
    d = {'timestamp': gps_time, 'name': 'gps', 'x': px, 'y': py}
    d_gps = DataPoint(d)
    gps_pose.header.stamp = rospy.Time.now()
    gps_pose.header.frame_id = '/camera_init'
    gps_pose.pose.position.x = px
    gps_pose.pose.position.y = py
    state_estimate(EKF, d_gps)
async def doer():
    datas = []
    await asyncio.sleep(2)
    print('starting')
    card = await reader.getSerial()
    print(f'found {card}')
    try:
        lastblock = await reader.read(0)
        lastblock = int(lastblock)
        print(f'reading {lastblock - 1} blocks')
        for block in range(1, lastblock):
            try:
                data = DataPoint(card, await reader.read(block))
                print(f'{block}.\t {data}')
                if data.date:
                    datas.append(data)
            except Exception as ex:
                print(ex)
        if input("Do you want to clear the card? : ") in ['y', 'yes']:
            await reader.formatcard()
        else:
            print('closing')
        # reader.running = False
    except:
        print('please place a card on the reader')
    print(datas)
    try:
        filename = ''.join([c for c in card if c != ':'])
        f = open(filename + ".json", "w+")
        ddict = [d.__dict__ for d in datas]
        f.write(json.dumps(ddict))
        f.close()
    except Exception as ex:
        print(ex)
    quit()
def parse_data(file_path, ENCODE=False):
    """
    Args:
        file_path - path to a text file with all data.

        Each line should have the following format:
        [SENSOR ID] [SENSOR RAW VALUES] [TIMESTAMP] [GROUND TRUTH VALUES]

        Whereas radar has three measurements (rho, phi, rhodot),
        lidar has two measurements (x, y). Specifically:

        For a row containing radar data, the columns are:
        sensor_type, rho_measured, phi_measured, rhodot_measured, timestamp,
        x_groundtruth, y_groundtruth, vx_groundtruth, vy_groundtruth

        For a row containing lidar data, the columns are:
        sensor_type, x_measured, y_measured, timestamp,
        x_groundtruth, y_groundtruth, vx_groundtruth, vy_groundtruth

        Example 1: line with three measurements from a radar sensor in polar
        coordinates, followed by a timestamp in unix time, followed by the
        "ground truth", which is the actual real position and velocity in
        cartesian coordinates (four state variables):

        R 8.46642 0.0287602 -3.04035 1477010443399637 8.6 0.25 -3.00029 0
        (R) (rho) (phi) (drho) (timestamp) (real x) (real y) (real vx) (real vy)

        Example 2: line with two measurements from a lidar sensor in cartesian
        coordinates, followed by a timestamp in unix time, followed by the
        "ground truth", which is the actual real position and velocity in
        cartesian coordinates (four state variables):

        L 8.44818 0.251553 1477010443449633 8.45 0.25 -3.00027 0

    Returns:
        all_sensor_data, all_ground_truths
        - two lists of DataPoint() instances
    """
    all_sensor_data = []
    all_ground_truths = []
    if ENCODE:
        message = 0
    with open(file_path) as f:
        for line in f:
            data = line.split()
            if data[0] == 'L':
                data_read = {
                    'timestamp': int(data[3]),
                    'name': 'lidar',
                    'x': float(data[1]),
                    'y': float(data[2]),
                    'vx': 0.0,
                    'vy': 0.0
                }
                sensor_data = DataPoint(data_read)
                g = {
                    'timestamp': int(data[3]),
                    'name': 'state',
                    'x': float(data[4]),
                    'y': float(data[5]),
                    'vx': float(data[6]),
                    'vy': float(data[7])
                }
                ground_truth = DataPoint(g)
            elif data[0] == 'R':
                # here sensor_data is the output of the DataPoint class
                if ENCODE:
                    # here we implement the QIM encoding if that option is enabled
                    # convert to cartesian
                    x, y, vx, vy = polar_to_cartesian(float(data[1]),
                                                      float(data[2]),
                                                      float(data[3]))
                    # embed data - only modify x, y
                    x, y = QIM_encode_twobit(np.array([x, y]), message)
                    message += 1
                    message %= 4
                    # convert back to polar
                    rho, phi, drho = cartesian_to_polar(x, y, vx, vy)
                    data_read = {
                        'timestamp': int(data[4]),
                        'name': 'radar',
                        'rho': float(rho),
                        'phi': float(phi),
                        'drho': float(drho)
                    }
                else:
                    data_read = {
                        'timestamp': int(data[4]),
                        'name': 'radar',
                        'rho': float(data[1]),
                        'phi': float(data[2]),
                        'drho': float(data[3])
                    }
                sensor_data = DataPoint(data_read)
                g = {
                    'timestamp': int(data[4]),
                    'name': 'state',
                    'x': float(data[5]),
                    'y': float(data[6]),
                    'vx': float(data[7]),
                    'vy': float(data[8])
                }
                ground_truth = DataPoint(g)
            all_sensor_data.append(sensor_data)
            all_ground_truths.append(ground_truth)
    return all_sensor_data, all_ground_truths
def parse_data(file_path):
    """
    Args:
        file_path - path to a text file with all data.

        Each line should have the following format:
        [SENSOR ID] [SENSOR RAW VALUES] [TIMESTAMP] [GROUND TRUTH VALUES]

        Whereas radar has three measurements (rho, phi, rhodot),
        lidar has two measurements (x, y). Specifically:

        For a row containing radar data, the columns are:
        sensor_type, rho_measured, phi_measured, rhodot_measured, timestamp,
        x_groundtruth, y_groundtruth, vx_groundtruth, vy_groundtruth

        For a row containing lidar data, the columns are:
        sensor_type, x_measured, y_measured, timestamp,
        x_groundtruth, y_groundtruth, vx_groundtruth, vy_groundtruth

        Example 1: line with three measurements from a radar sensor in polar
        coordinates, followed by a timestamp in unix time, followed by the
        "ground truth", which is the actual real position and velocity in
        cartesian coordinates (four state variables):

        R 8.46642 0.0287602 -3.04035 1477010443399637 8.6 0.25 -3.00029 0
        (R) (rho) (phi) (drho) (timestamp) (real x) (real y) (real vx) (real vy)

        Example 2: line with two measurements from a lidar sensor in cartesian
        coordinates, followed by a timestamp in unix time, followed by the
        "ground truth", which is the actual real position and velocity in
        cartesian coordinates (four state variables):

        L 8.44818 0.251553 1477010443449633 8.45 0.25 -3.00027 0

    Returns:
        all_sensor_data, all_ground_truths
        - two lists of DataPoint() instances
    """
    all_sensor_data = []
    all_ground_truths = []
    with open(file_path) as f:
        for line in f:
            data = line.split()
            if data[0] == 'L':
                sensor_data = DataPoint({
                    'timestamp': int(data[3]),
                    'name': 'lidar',
                    'x': float(data[1]),
                    'y': float(data[2])
                })
                g = {
                    'timestamp': int(data[3]),
                    'name': 'state',
                    'x': float(data[4]),
                    'y': float(data[5]),
                    'vx': float(data[6]),
                    'vy': float(data[7])
                }
                ground_truth = DataPoint(g)
            elif data[0] == 'R':
                sensor_data = DataPoint({
                    'timestamp': int(data[4]),
                    'name': 'radar',
                    'rho': float(data[1]),
                    'phi': float(data[2]),
                    'drho': float(data[3])
                })
                g = {
                    'timestamp': int(data[4]),
                    'name': 'state',
                    'x': float(data[5]),
                    'y': float(data[6]),
                    'vx': float(data[7]),
                    'vy': float(data[8])
                }
                ground_truth = DataPoint(g)
            all_sensor_data.append(sensor_data)
            all_ground_truths.append(ground_truth)
    return all_sensor_data, all_ground_truths
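# A minimal usage sketch; the file name is hypothetical and stands in for any
# measurement log in the format documented above.
all_sensor_data, all_ground_truths = parse_data('measurements.txt')
print(len(all_sensor_data), 'measurements loaded')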
def parse_data(file_path):
    """
    Args:
        file_path

    Radar has three measurements (rho, phi, rhodot);
    lidar has two measurements (x, y).

    For a row containing radar data, the columns are:
    sensor_type, rho_measured, phi_measured, rhodot_measured, timestamp,
    x_groundtruth, y_groundtruth, vx_groundtruth, vy_groundtruth

    For a row containing lidar data, the columns are:
    sensor_type, x_measured, y_measured, timestamp,
    x_groundtruth, y_groundtruth, vx_groundtruth, vy_groundtruth
    """
    all_sensor_data = []
    all_ground_truths = []
    with open(file_path) as f:
        for line in f:
            data = line.split()
            if data[0] == 'L':
                sensor_data = DataPoint({
                    'timestamp': int(data[3]),
                    'name': 'lidar',
                    'x': float(data[1]),
                    'y': float(data[2])
                })
                g = {
                    'timestamp': int(data[3]),
                    'name': 'state',
                    'x': float(data[4]),
                    'y': float(data[5]),
                    'vx': float(data[6]),
                    'vy': float(data[7])
                }
                ground_truth = DataPoint(g)
            elif data[0] == 'R':
                sensor_data = DataPoint({
                    'timestamp': int(data[4]),
                    'name': 'radar',
                    'rho': float(data[1]),
                    'phi': float(data[2]),
                    'drho': float(data[3])
                })
                g = {
                    'timestamp': int(data[4]),
                    'name': 'state',
                    'x': float(data[5]),
                    'y': float(data[6]),
                    'vx': float(data[7]),
                    'vy': float(data[8])
                }
                ground_truth = DataPoint(g)
            all_sensor_data.append(sensor_data)
            all_ground_truths.append(ground_truth)
    return all_sensor_data, all_ground_truths
light = lt.light()
print("Light (channel Blue lux, channel Red lux): " +
      str(light[0] / 2. + light[1] / 2.))
print("Acceleration: " + str(li.acceleration()))
print("Roll: " + str(li.roll()))
print("Pitch: " + str(li.pitch()))
print("Battery voltage: " + str(py.read_battery_voltage()))

print("\nSending data to ThingSpeak...")
time_alive = alive_timer.read_ms()
timestamp = utime.time()
datapoint = DataPoint(timestamp=timestamp,
                      temp_mp=temp_mp,
                      temp_si=temp_si,
                      humidity=humidity,
                      altitude=altitude,
                      pressure=pressure,
                      dewpoint=dewpoint,
                      light=light[0] / 2. + light[1] / 2.,
                      humidity_ambient=humidity_ambient)
send_to_thingspeak(datapoint)
print("done!\n")

# Go to sleep, 10 minutes minus 10 seconds
tear_down(alive_timer, 590 * 1000)