Example #1
    def test_dataset_val(self):
        config = LocalConfig()
        dataset = Dataset().get_val()
        for batch in dataset.take(1):
            image = ((batch[0].numpy()[0, :, :, :] + 1) / 2 * 255).astype(
                np.int32)
            output = np.sum(image)
            expected = np.array(8123856, dtype=np.int32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual(config.ENCODER_INPUT_SHAPE, image.shape)

            kp2d = ((batch[1].numpy()[0, :, :2] + 1) / 2 *
                    image.shape[:2]).astype(np.int32)
            output = np.sum(kp2d)
            expected = np.array(4040, dtype=np.int32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual((config.NUM_KP2D, 2), kp2d.shape)

            vis = batch[1].numpy()[0, :, 2].astype(np.int32)
            output = np.sum(vis)
            expected = np.array(17, dtype=np.int32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual((config.NUM_KP2D, ), vis.shape)

            kp3d = batch[2].numpy()[0, :, :]
            output = np.sum(kp3d)
            expected = np.array(56.9217948, dtype=np.float32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual((config.NUM_KP3D, 3), kp3d.shape)

            # check that the has3d flag is present and has the correct shape
            has3d = batch[3].numpy()[0]
            self.assertEqual(tf.constant(1, tf.int64), has3d)
            self.assertEqual((config.BATCH_SIZE, ), batch[3].shape)
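The assertions above first undo the dataset's [-1, 1] normalization before summing. A standalone sketch of that denormalization, with illustrative shapes (the real ones come from LocalConfig):

import numpy as np

# Illustrative shapes only; ENCODER_INPUT_SHAPE and NUM_KP2D come from the config.
image = np.random.uniform(-1.0, 1.0, size=(224, 224, 3)).astype(np.float32)
kp2d = np.random.uniform(-1.0, 1.0, size=(19, 2)).astype(np.float32)

pixels = ((image + 1) / 2 * 255).astype(np.int32)              # back to 0..255
coords = ((kp2d + 1) / 2 * image.shape[:2]).astype(np.int32)   # back to pixel coordinates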
Example #2
    def train(self):
        # Place tensors on the CPU
        with tf.device('/CPU:0'):
            dataset = Dataset()
            ds_train = dataset.get_train()
            ds_smpl = dataset.get_smpl()
            ds_val = dataset.get_val()

        start = 1
        if self.config.RESTORE_EPOCH:
            start = self.config.RESTORE_EPOCH

        for epoch in range(start, self.config.EPOCHS + 1):

            start = time.time()
            print('Start of Epoch {}'.format(epoch))

            dataset_train = ExceptionHandlingIterator(
                tf.data.Dataset.zip((ds_train, ds_smpl)))
            total = int(self.config.NUM_TRAINING_SAMPLES /
                        self.config.BATCH_SIZE)

            for image_data, theta in tqdm(dataset_train,
                                          total=total,
                                          position=0,
                                          desc='training'):
                images, kp2d, kp3d, has3d = (image_data[0], image_data[1],
                                             image_data[2], image_data[3])
                self._train_step(images, kp2d, kp3d, has3d, theta)

            self._log_train(epoch=epoch)

            total = int(self.config.NUM_VALIDATION_SAMPLES /
                        self.config.BATCH_SIZE)
            for image_data in tqdm(ds_val,
                                   total=total,
                                   position=0,
                                   desc='validate'):
                images, kp2d, kp3d, has3d = (image_data[0], image_data[1],
                                             image_data[2], image_data[3])
                self._val_step(images, kp2d, kp3d, has3d)

            self._log_val(epoch=epoch)

            print('Time taken for epoch {} is {} sec\n'.format(
                epoch,
                time.time() - start))

            # save a model checkpoint every 5 epochs
            if epoch % 5 == 0:
                print('saving checkpoint\n')
                self.checkpoint_manager.save(epoch)

        self.summary_writer.flush()
        self.checkpoint_manager.save(self.config.EPOCHS + 1)
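ExceptionHandlingIterator is not shown in this example. A minimal sketch of what such a wrapper could look like, assuming its job is simply to skip batches that raise instead of aborting the epoch (this is an assumption, not the project's actual implementation):

class ExceptionHandlingIterator:
    """Sketch: wrap an iterable and skip items whose __next__ raises."""

    def __init__(self, iterable):
        self._iterator = iter(iterable)

    def __iter__(self):
        return self

    def __next__(self):
        while True:
            try:
                return next(self._iterator)
            except StopIteration:
                raise
            except Exception:
                continue  # skip the corrupt batch and fetch the next one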
Example #3
    def test(self):
        """Run evaluation of the model
        Specify LOG_DIR to point to the saved checkpoint directory
        """

        if self.restore_check is None:
            raise RuntimeError(
                'restore did not succeed, please check if you set config.LOG_DIR correctly'
            )

        self.restore_check.assert_existing_objects_matched(
        ).assert_nontrivial_match()

        # Place tensors on the CPU
        with tf.device('/CPU:0'):
            dataset = Dataset()
            ds_test = dataset.get_test()

        start = time.time()
        print('Start of Testing')

        mpjpe, mpjpe_aligned, sequences = [], [], []

        total = int(self.config.NUM_TEST_SAMPLES / self.config.BATCH_SIZE)
        for image_data in tqdm(ds_test,
                               total=total,
                               position=0,
                               desc='testing'):
            image, kp3d, sequence = image_data[0], image_data[1], image_data[2]
            kp3d_mpjpe, kp3d_mpjpe_aligned = self._test_step(image, kp3d)

            mpjpe.append(kp3d_mpjpe)
            mpjpe_aligned.append(kp3d_mpjpe_aligned)
            sequences.append(sequence)

        print('Time taken for testing {} sec\n'.format(time.time() - start))

        def convert(tensor, num=None):
            if num is None:
                num = self.config.NUM_KP3D
            return tf.squeeze(tf.reshape(tf.stack(tensor), [-1, num]))

        mpjpe = convert(mpjpe)
        mpjpe_aligned = convert(mpjpe_aligned)
        sequences = convert(sequences, 1)
        result_dict = {
            "kp3d_mpjpe": mpjpe,
            "kp3d_mpjpe_aligned": mpjpe_aligned,
            "seq": sequences,
        }

        return result_dict
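The convert helper stacks the per-batch [batch_size, num] tensors and flattens them into a single [num_samples, num] tensor. A small self-contained illustration (14 is a placeholder for config.NUM_KP3D):

import tensorflow as tf

num_kp = 14                                            # placeholder for config.NUM_KP3D
batches = [tf.zeros([8, num_kp]) for _ in range(3)]    # 3 batches of 8 samples each
flat = tf.squeeze(tf.reshape(tf.stack(batches), [-1, num_kp]))
print(flat.shape)                                      # (24, 14)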
Example #4
    def __init__(self):
        self.is_data_posted = False
        self.log = Log()  # log file for testing and debugging (writes debug messages)
        self.log_data = LogData()  # sensor data log (used if no internet connection is available)
        self.api = SamsApi()  # https://sams.science.itf.llu.lv/ data warehouse plugin used to send the data
        self.config = Config()  # Configurations (/config/config.ini)
        self.config_data = self.config.get_config_data()
        self.repost_seconds = int(self.config_data['INTERVAL']['repost_seconds'])
        self.app_wait_time = int(self.config_data['INTERVAL']['app_wait_seconds'])
        self.dataset_taken = False
        self.dataset_taken_counter = 0

        self.data = Dataset()  # collect all the data from sensors
        self.dataset = ""
Example #5
    def __init__(self):
        self.token_handler = TokenHandler()
        self.app_config = ApplicationConfig()
        self.mic = MicrophoneHelper()
        self.dataset = Dataset()
        self.color_print = Color()
        self.sensors = []

        if self.app_config.local_config.is_dht22:
            self.sensors.append("dht22")
        if self.app_config.local_config.is_ds18b20:
            self.sensors.append("ds18b20")
        if self.app_config.local_config.is_scale:
            self.sensors.append("scale")
Example #6
    def test_dataset_smpl(self):
        config = LocalConfig()
        dataset = Dataset().get_smpl()
        for batch in dataset.take(1):
            shape = (config.BATCH_SIZE * config.ITERATIONS, (config.NUM_POSE_PARAMS + config.NUM_SHAPE_PARAMS))
            self.assertEqual(shape, batch.shape)

            pose = batch[0].numpy()[:config.NUM_POSE_PARAMS]
            mean = tf.reduce_mean(pose)
            expected = np.array(0.0411809, dtype=np.float32)
            self.assertAllCloseAccordingToType(expected, mean)

            shape = batch[0].numpy()[-config.NUM_SHAPE_PARAMS:]
            mean = tf.reduce_mean(shape)
            expected = np.array(0.12554605, dtype=np.float32)
            self.assertAllCloseAccordingToType(expected, mean)
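Each row of the SMPL batch is a flattened [pose | shape] parameter vector. A minimal sketch of that split, assuming the usual SMPL sizes of 72 pose and 10 shape parameters (the real values come from the config):

import numpy as np

NUM_POSE_PARAMS, NUM_SHAPE_PARAMS = 72, 10   # typical SMPL sizes (assumption)
theta = np.zeros(NUM_POSE_PARAMS + NUM_SHAPE_PARAMS, dtype=np.float32)

pose = theta[:NUM_POSE_PARAMS]               # first 72 values: axis-angle pose
shape = theta[-NUM_SHAPE_PARAMS:]            # last 10 values: shape coefficients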
Example #7
    def test_dataset_test(self):
        config = LocalConfig()
        dataset = Dataset().get_test()
        for batch in dataset.take(1):
            image = ((batch[0].numpy()[0, :, :, :] + 1) / 2 * 255).astype(np.int32)
            output = np.sum(image)
            expected = np.array(10050903, dtype=np.int32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual(config.ENCODER_INPUT_SHAPE, image.shape)

            kp3d = batch[1].numpy()[0, :, :]
            output = np.sum(kp3d)
            expected = np.array(38780.2031, dtype=np.float32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual((config.NUM_KP3D, 3), kp3d.shape)

            # check that the sequence identifier is included and decodes correctly
            sequence = batch[2].numpy()[0].decode("utf-8")
            self.assertEqual('TS1', sequence)
Example #8
class Application:
    def __init__(self):
        self.is_data_posted = False
        self.log = Log()  # log file for testing and debugging (writes debug messages)
        self.log_data = LogData()  # sensor data log (used if no internet connection is available)
        self.api = SamsApi()  # https://sams.science.itf.llu.lv/ data warehouse plugin used to send the data
        self.config = Config()  # Configurations (/config/config.ini)
        self.config_data = self.config.get_config_data()
        self.repost_seconds = int(self.config_data['INTERVAL']['repost_seconds'])
        self.app_wait_time = int(self.config_data['INTERVAL']['app_wait_seconds'])
        self.dataset_taken = False
        self.dataset_taken_counter = 0

        self.data = Dataset()  # collect all the data from sensors
        self.dataset = ""

    def take_dataset(self):
        self.dataset = ""  # empty the dataset before taking new data
        self.dataset = self.data.get_dataset()

    def start(self):
        while True:
            try:
                self.log.write_log("take dataset")
                self.take_dataset()
                # if stored data (/log/*.json) is available, try to send it to the data warehouse
                if self.log_data.has_log_files():
                    self.log.write_log("has log files")
                    self.log_data.post_log_files(self.dataset)
                # otherwise, post the newly taken dataset
                else:
                    response = self.api.call(self.dataset)
                    # try to post the data; an API status of 200 means everything went fine
                    if response == 200:
                        self.log.write_log("dataset posted")
                    # if there is no internet connection or the API rejects the request, store the data
                    # if the API returns status code 500, the log function will delete the file
                    else:
                        self.log.write_log("dataset posting failed. Statuscode: {0}".format(response))
                        self.is_data_posted = False  # data where not posted
                        self.log.write_log("log dataset")
                        self.log_data.insert(self.dataset)  # write new log file with the dataset
                        # retry every X seconds until the data has been posted (e.g. no internet connection)
                        while not self.is_data_posted:
                            # only a 200 response counts as a successful post
                            self.is_data_posted = self.api.call(self.dataset) == 200
                            time.sleep(self.repost_seconds)
                self.log.write_log("wait: {}".format(self.app_wait_time))
                self.dataset_taken = False
                time.sleep(int(self.app_wait_time))  # sleep X seconds before collecting the new data
            except Exception as e:
                print(e)
                self.log.write_log(e)
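A hypothetical entry point (not part of the example) that runs the collect-and-post loop until the process is stopped:

if __name__ == '__main__':
    app = Application()
    app.start()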
Example #9
    def __init__(self):
        self.dataset_helper = DatasetLogHelper()  # saves and sends the dataset
        self.dataset = Dataset()  # dataset for taking sensor data
        self.app_config = ApplicationConfig()  # configuration data (on- and offline)
        self.wifi_helper = WifiHelper()  # gets the signal strength for debugging purposes
        self.attempts = 0
        self.dwh_api = DataApi()
        self.token_handler = TokenHandler()
        self.error_helper = ErrorHelper()
        self.failed_sensor = ""
        self.handle_online_status()
        self.checker = SelfChecker()

        # send status:
        try:
            send_log(
                f'Start Application: {self.app_config.local_config.version}',
                "debug")
            send_log(f'Config Name: {self.app_config.local_config.group}',
                     "debug")
            send_log(
                f'Signal Strength: {self.wifi_helper.get_signal_strength()}',
                "debug")
            if self.current_volt():
                send_log(f'Voltage: {self.current_volt()}', "debug")

            set_timezone(self.app_config.local_config.timezone)

            for file, status in self.checker.check_files().items():
                send_log(f"created file: {file}.", "warning")
            for failed_sensor in self.error_helper.get_sensors_with_errors():
                send_log(
                    f'Please check {str(failed_sensor)} and reset all errors to reactivate the sensor.',
                    "warning")
        except Exception as e:
            print(e)
Example #10
    def test_dataset_train(self):
        config = LocalConfig()
        dataset = Dataset().get_train()
        for batch in dataset.take(1):
            image = ((batch[0].numpy()[0, :, :, :] + 1) / 2 * 255).astype(
                np.int32)
            output = np.sum(image)
            expected = np.array(6991299, dtype=np.int32)
            self.assertAllCloseAccordingToType(
                expected, output)  # this can sometimes fail with output=66
            self.assertEqual(config.BATCH_SIZE, batch[0].shape[0])
            self.assertEqual(config.ENCODER_INPUT_SHAPE, batch[0].shape[1:])

            kp2d = ((batch[1].numpy()[0, :, :2] + 1) / 2 *
                    image.shape[:2]).astype(np.int32)
            output = np.sum(kp2d)
            expected = np.array(3818, dtype=np.int32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual((config.BATCH_SIZE, config.NUM_KP2D, 3),
                             batch[1].shape)

            vis = batch[1].numpy()[0, :, 2].astype(np.int32)
            output = np.sum(vis)
            expected = np.array(17, dtype=np.int32)
            self.assertAllCloseAccordingToType(expected, output)

            kp3d = batch[2].numpy()[0, :, :]
            output = np.sum(kp3d)
            expected = np.array(4.11272e-06, dtype=np.float32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual((config.BATCH_SIZE, config.NUM_KP3D, 3),
                             batch[2].shape)

            # check that the has3d flag is present and has the correct shape
            has3d = batch[3].numpy()[0]
            self.assertEqual(tf.constant(1, tf.int64), has3d)
            self.assertEqual((config.BATCH_SIZE, ), batch[3].shape)
Example #11
    from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 unused import

    class DatasetConfig(LocalConfig):
        # DATA_DIR = join('/', 'data', 'ssd1', 'russales', 'new_records')
        # DATASETS = ['coco'] #['lsp', 'lsp_ext', 'mpii', 'coco', 'mpii_3d', 'h36m']
        # SMPL_DATASETS = ['cmu', 'joint_lim']
        TRANS_MAX = 20

    # class Config is implemented as a singleton, initialize the subclass first!
    config = DatasetConfig()

    import tensorflow as tf

    # Place tensors on the CPU
    with tf.device('/CPU:0'):
        dataset = Dataset()
        ds_train = dataset.get_train()
        ds_smpl = dataset.get_smpl()
        ds_val = dataset.get_val()

    import matplotlib.pyplot as plt

    for images, kp2d, kp3d, has3d in ds_train.take(1):
        fig = plt.figure(figsize=(9.6, 5.4))
        image_orig = tf.image.decode_jpeg(images[0], channels=3)
        image_orig = image_orig.numpy()
        kp2d = kp2d[0].numpy()
        ax0 = fig.add_subplot(111)
        image_2d = draw_2d_on_image(image_orig, kp2d[:, :2], vis=kp2d[:, 2])
        ax0.imshow(image_2d)
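The snippet builds the figure but never renders or saves it; a standalone script would typically end with something like (the filename is only an example):

    plt.show()  # or: fig.savefig('kp2d_overlay.png')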
Example #12
    def test(self, return_kps=False):
        """Run evaluation of the model
        Specify LOG_DIR to point to the saved checkpoint directory

        Args:
            return_kps: set to True to also return the predicted and real keypoints (default: False)
        """

        if self.restore_check is None:
            raise RuntimeError(
                'restore did not succeed, please check if you set config.LOG_DIR correctly'
            )

        if self.config.INITIALIZE_CUSTOM_REGRESSOR:
            self.restore_check.assert_nontrivial_match()
        else:
            self.restore_check.assert_existing_objects_matched(
            ).assert_nontrivial_match()

        # Place tensors on the CPU
        with tf.device('/CPU:0'):
            dataset = Dataset()
            ds_test = dataset.get_test()

        start = time.time()
        print('Start of Testing')

        mpjpe, mpjpe_aligned, sequences, kps3d_pred, kps3d_real = [], [], [], [], []

        total = int(self.config.NUM_TEST_SAMPLES / self.config.BATCH_SIZE)
        for image_data in tqdm(ds_test,
                               total=total,
                               position=0,
                               desc='testing'):
            image, kp3d, sequence = image_data[0], image_data[1], image_data[2]
            kp3d_mpjpe, kp3d_mpjpe_aligned, predict_kp3d, real_kp3d = self._test_step(
                image, kp3d, return_kps=return_kps)

            if return_kps:
                kps3d_pred.append(predict_kp3d)
                kps3d_real.append(real_kp3d)

            mpjpe.append(kp3d_mpjpe)
            mpjpe_aligned.append(kp3d_mpjpe_aligned)
            sequences.append(sequence)

        print('Time taken for testing {} sec\n'.format(time.time() - start))

        def convert(tensor, num=None, is_kp=False):
            if num is None:
                num = self.config.NUM_KP3D
            if is_kp:
                return tf.squeeze(tf.reshape(tf.stack(tensor), [-1, num, 3]))

            return tf.squeeze(tf.reshape(tf.stack(tensor), [-1, num]))

        mpjpe = convert(mpjpe)
        mpjpe_aligned = convert(mpjpe_aligned)
        sequences = convert(sequences, 1)
        result_dict = {
            "kp3d_mpjpe": mpjpe,
            "kp3d_mpjpe_aligned": mpjpe_aligned,
            "seq": sequences,
        }

        if return_kps:
            kps3d_pred = convert(kps3d_pred, is_kp=True)
            kps3d_real = convert(kps3d_real, is_kp=True)
            result_dict.update({
                'kps3d_pred': kps3d_pred,
                'kps3d_real': kps3d_real
            })

        return result_dict
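A hypothetical usage sketch; the class name Tester is an assumption, only the test() signature and the result dictionary keys are taken from the example above:

import tensorflow as tf

tester = Tester()  # hypothetical class that provides the test() method shown above
results = tester.test(return_kps=True)
print('MPJPE:', float(tf.reduce_mean(results['kp3d_mpjpe'])))
print('predicted keypoints:', results['kps3d_pred'].shape)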
Example #13
import numpy as np

if __name__ == '__main__':

    # each assignment below overwrites the previous one; only the last value is used
    text = '花菇历来被国人作为延年益寿的补品,是香菇中的上品,含有丰富的营养价值,可帮助调节人体新陈代谢,助消化'  # "Flower mushrooms have long been prized in China as a longevity tonic; they are the finest shiitake, highly nutritious, and help regulate metabolism and aid digestion"
    text = '冬天到了,吃什么比较好'  # "Winter is here, what should I eat?"
    text = '天气冷了吃什么比较好'  # "The weather has turned cold, what should I eat?"
    text = '天气冷了'  # "The weather has turned cold"
    text = '小孩'  # "child"
    text = '美容'  # "beauty care"
    text = '老公'  # "husband"

    conf = configparser.ConfigParser()
    conf.read("./conf.ini")

    ds = Dataset(conf)

    # preprocessing (left commented out)

    # ds.segment_data('./data/baidu_recipe.json',
    #                 './data/baidu_recipe_segment.json',
    #                 ["title", "illustration", "practice", "materials"],
    #                 True)
    # ds.ngram_data('./data/baidu_recipe_segment.json',
    #               './data/baidu_recipe_ngram.json',
    #               ["title", "illustration", "practice", "materials"])
    #
    # ds.segment_data('./data/recipe_label_data.json',
    #                 './data/recipe_segment.json',
    #                 ["title", "illustration", "practice", "materials"],
    #                 True)
Example #14
class AppTest:
    def __init__(self):
        self.token_handler = TokenHandler()
        self.app_config = ApplicationConfig()
        self.mic = MicrophoneHelper()
        self.dataset = Dataset()
        self.color_print = Color()
        self.sensors = []

        if self.app_config.local_config.is_dht22:
            self.sensors.append("dht22")
        if self.app_config.local_config.is_ds18b20:
            self.sensors.append("ds18b20")
        if self.app_config.local_config.is_scale:
            self.sensors.append("scale")

    def app_status(self):
        # returns the current status of the app
        # how many failed attempts
        # currently taking a measurement or waiting
        # running or not running
        pass

    def token_test(self):
        # returns True if the user credentials are correct and a valid access token was received
        pass

    def config_test(self):
        # returns True if the configuration file exists and can be synchronized with the DW
        pass

    @staticmethod
    def get_available_space():
        path = '/'
        bytes_avail = psutil.disk_usage(path).free
        gigabytes_avail = bytes_avail / 1024 / 1024 / 1024

        return round(gigabytes_avail, 2)

    def dataset_test(self):
        self.color_print.bold("Starting test....")
        test_data = {}
        for sensor in self.sensors:
            testdata = self.dataset.get_data(sensor)
            if testdata:
                for data in testdata:
                    if hasattr(data, '__len__'):
                        test_data[sensor] = True
                        self.color_print.ok_green(f'{sensor} ..........OK!')
                    else:
                        test_data[sensor] = False
                        self.color_print.fail(f'{sensor} ..........FAILED!')
            else:
                test_data[sensor] = False
                self.color_print.fail(f'{sensor} ..........FAILED!')

        if self.app_config.local_config.is_microphone:
            mic_test = self.mic.get_fft_data()
            if mic_test:
                test_data["microphone"] = True
                self.color_print.ok_green("Microphone ..........OK!")
            else:
                test_data["microphone"] = False
                self.color_print.fail("Microphone ..........FAILED!")

        return test_data
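A hypothetical way to run the self-test and summarize its result; only dataset_test() and get_available_space() are taken from the example above:

if __name__ == '__main__':
    app_test = AppTest()
    results = app_test.dataset_test()
    failed = [sensor for sensor, ok in results.items() if not ok]
    print(f'Available space: {AppTest.get_available_space()} GB')
    print('all sensors OK' if not failed else f'failed sensors: {failed}')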
Example #15
class Application:
    def __init__(self):
        self.dataset_helper = DatasetLogHelper()  # saves and sends the dataset
        self.dataset = Dataset()  # dataset for taking sensor data
        self.app_config = ApplicationConfig()  # configuration data (on- and offline)
        self.wifi_helper = WifiHelper()  # gets the signal strength for debugging purposes
        self.attempts = 0
        self.dwh_api = DataApi()
        self.token_handler = TokenHandler()
        self.error_helper = ErrorHelper()
        self.failed_sensor = ""
        self.handle_online_status()
        self.checker = SelfChecker()

        # send status:
        try:
            send_log(
                f'Start Application: {self.app_config.local_config.version}',
                "debug")
            send_log(f'Config Name: {self.app_config.local_config.group}',
                     "debug")
            send_log(
                f'Signal Strength: {self.wifi_helper.get_signal_strength()}',
                "debug")
            if self.current_volt():
                send_log(f'Voltage: {self.current_volt()}', "debug")

            set_timezone(self.app_config.local_config.timezone)

            for file, status in self.checker.check_files().items():
                send_log(f"created file: {file}.", "warning")
            for failed_sensor in self.error_helper.get_sensors_with_errors():
                send_log(
                    f'Please check {str(failed_sensor)} and reset all errors to reactivate the sensor.',
                    "warning")
        except Exception as e:
            print(e)

    def start(self):
        while True:
            try:
                # before do anything, handle on / offline status and try to sync config
                self.handle_online_status()
                sensors = []
                if not self.app_config.local_config.ignore_error:  # if not ignore error
                    if self.app_config.local_config.is_ds18b20 and not self.error_helper.has_error(
                            "DS18B20"):
                        sensors.append("ds18b20")  # TEMP SENSOR
                    if self.app_config.local_config.is_dht22 and not self.error_helper.has_error(
                            "DHT22"):
                        sensors.append("dht22")  # TEMP and HUMIDITY SENSOR
                    if self.app_config.local_config.is_scale and not self.error_helper.has_error(
                            "SCALE"):
                        sensors.append("scale")
                    if self.app_config.local_config.is_microphone and not self.error_helper.has_error(
                            "MICROPHONE"):
                        sensors.append("microphone")
                else:  # if ignore all errors
                    if self.app_config.local_config.is_ds18b20:
                        sensors.append("ds18b20")  # TEMP SENSOR
                    if self.app_config.local_config.is_dht22:
                        sensors.append("dht22")  # TEMP and HUMIDITY SENSOR
                    if self.app_config.local_config.is_scale:
                        sensors.append("scale")
                    if self.app_config.local_config.is_microphone:
                        sensors.append("microphone")

                # START GET AND SEND DATASET BLOCK
                for sensor in sensors:
                    dataset = self.dataset.get_data(
                        sensor)  # get dataset from sensor
                    if dataset:
                        for x in range(len(dataset)):
                            if not dataset[x] or not hasattr(
                                    dataset[x], '__len__'):
                                self.sensor_error(sensor.upper())
                            else:
                                response = self.dwh_api.send_data(dataset[x])
                                if not response:
                                    self.dataset_helper.insert(
                                        dataset[x])  # save data
                    else:
                        self.sensor_error(sensor.upper())

                # END DATASET BLOCK ###

                # START POST LOG FILES ####
                if self.wifi_helper.is_online():
                    response = self.dataset_helper.post_log_files()
                    if not response:
                        self.attempts += 1
                # END POST LOG FILES ####

                # START CHECKING FAILED ATTEMPTS BLOCK
                if int(self.attempts) >= int(
                        self.app_config.local_config.interval_attempts_before_restart):
                    self.error_helper.set_sensor_restarted(self.failed_sensor)
                    self.restart_hive("Too many errors: reboot system!",
                                      "error")
                # END FAILED ATTEMPTS BLOCK ###

                # START CHECKING UPDATE
                if (self.wifi_helper.is_online()
                        and self.app_config.local_config.auto_update):
                    self.update()
                # END CHECKING UPDATE

                # START AUTO SHUTDOWN BLOCK
                if self.app_config.local_config.auto_shutdown:
                    send_log(f'Power off. System time: {str(get_time())}',
                             "debug")
                    time.sleep(30)
                    os.system("sudo poweroff")
                # END AUTO SHUTDOWN BLOCK ###

                # WAIT BEFORE TAKE NEW DATASET
                time.sleep(
                    int(self.app_config.local_config.interval_app_wait_seconds)
                )
                # END WAIT ###

            except Exception as e:
                print(f'app crashed: {e}')
                self.restart_hive(
                    f'Application crashed! Error: {e}. Reboot System', "error")

    @staticmethod
    def restart_hive(message, level):
        send_log(message, level)
        time.sleep(120)
        os.system('sudo reboot')

    def sensor_error(self, sensor):  # sensor is offline or sends no valid data
        self.attempts += 1
        self.failed_sensor = str(sensor)
        if os.path.exists(mapping.witty_pi):
            self.error_helper.set_sensor_with_error(sensor)
            self.error_helper.set_sensor_restarted(sensor)
        else:
            self.error_helper.set_sensor_with_error(sensor)

        send_log(f'{sensor} failed!', "error")

    def update(self):
        try:
            r = requests.get(mapping.version_url)
            data = r.json()
            git_version = data['files']['version']['content']
            old_version = self.app_config.local_config.version

            if float(git_version) > float(
                    self.app_config.local_config.version):
                if os.path.exists(mapping.update_file):
                    os.remove(mapping.update_file)
                update_file = requests.get(mapping.update_file_url)
                with open(mapping.update_file, 'wb+') as f:
                    f.write(update_file.content)
                self.app_config.local_config.set_update()
                self.app_config.local_config.set_config_data(
                    "DEFAULT", "version", str(git_version))
                self.restart_hive(
                    f"update from {old_version} to {git_version}", "debug")

        except Exception as e:
            print(e)

    def handle_online_status(self):
        try:
            # check if system has valid access token
            # write online status
            if not self.token_handler.get_access_token():
                self.wifi_helper.update_online_status(False)
            else:
                self.wifi_helper.update_online_status(True)
            # check online status
            # sync config (on- or offline)
            if self.wifi_helper.is_online():
                self.app_config.sync_config()
            else:
                self.app_config.local_config.get_config_data()
        except Exception as e:
            print(e)

    @staticmethod
    def current_volt():
        try:
            v_out = []
            with open(mapping.witty_pi_log) as f:
                for i, line in enumerate(f):
                    if 'Current Vout' in line:
                        v_out.append(line)
            length = len(v_out) - 1

            return v_out[length][:-1]
        except Exception:
            return False