Example #1
    def test_dataset_val(self):
        config = LocalConfig()
        dataset = Dataset().get_val()
        for batch in dataset.take(1):
            image = ((batch[0].numpy()[0, :, :, :] + 1) / 2 * 255).astype(
                np.int32)
            output = np.sum(image)
            expected = np.array(8123856, dtype=np.int32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual(config.ENCODER_INPUT_SHAPE, image.shape)

            kp2d = ((batch[1].numpy()[0, :, :2] + 1) / 2 *
                    image.shape[:2]).astype(np.int32)
            output = np.sum(kp2d)
            expected = np.array(4040, dtype=np.int32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual((config.NUM_KP2D, 2), kp2d.shape)

            vis = batch[1].numpy()[0, :, 2].astype(np.int32)
            output = np.sum(vis)
            expected = np.array(17, dtype=np.int32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual((config.NUM_KP2D, ), vis.shape)

            kp3d = batch[2].numpy()[0, :, :]
            output = np.sum(kp3d)
            expected = np.array(56.9217948, dtype=np.float32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual((config.NUM_KP3D, 3), kp3d.shape)

            # check that the has3d flag is present with the correct shape
            has3d = batch[3].numpy()[0]
            self.assertEqual(tf.constant(1, tf.int64), has3d)
            self.assertEqual((config.BATCH_SIZE, ), batch[3].shape)
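
The (x + 1) / 2 * scale expressions above undo the dataset's normalization: images come out of the pipeline scaled to [-1, 1], and 2D keypoints are stored as [-1, 1] relative coordinates. A minimal standalone sketch of the two inverse mappings (the helper names are mine, not from the project):

    import numpy as np

    def denormalize_image(image):
        # map [-1, 1] floats back to [0, 255] integer pixel values
        return ((image + 1) / 2 * 255).astype(np.int32)

    def denormalize_kp2d(kp2d, image_shape):
        # map [-1, 1] relative coordinates back to pixel coordinates
        return ((kp2d + 1) / 2 * np.asarray(image_shape[:2])).astype(np.int32)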
Example #2
    def train(self):
        # Place tensors on the CPU
        with tf.device('/CPU:0'):
            dataset = Dataset()
            ds_train = dataset.get_train()
            ds_smpl = dataset.get_smpl()
            ds_val = dataset.get_val()

        start = 1
        if self.config.RESTORE_EPOCH:
            start = self.config.RESTORE_EPOCH

        for epoch in range(start, self.config.EPOCHS + 1):

            start = time.time()
            print('Start of Epoch {}'.format(epoch))

            dataset_train = ExceptionHandlingIterator(
                tf.data.Dataset.zip((ds_train, ds_smpl)))
            total = int(self.config.NUM_TRAINING_SAMPLES /
                        self.config.BATCH_SIZE)

            for image_data, theta in tqdm(dataset_train,
                                          total=total,
                                          position=0,
                                          desc='training'):
                images, kp2d, kp3d, has3d = image_data
                self._train_step(images, kp2d, kp3d, has3d, theta)

            self._log_train(epoch=epoch)

            total = int(self.config.NUM_VALIDATION_SAMPLES /
                        self.config.BATCH_SIZE)
            for image_data in tqdm(ds_val,
                                   total=total,
                                   position=0,
                                   desc='validate'):
                images, kp2d, kp3d, has3d = image_data
                self._val_step(images, kp2d, kp3d, has3d)

            self._log_val(epoch=epoch)

            print('Time taken for epoch {} is {} sec\n'.format(
                epoch,
                time.time() - start))

            # saving (checkpoint) the model every 5 epochs
            if epoch % 5 == 0:
                print('saving checkpoint\n')
                self.checkpoint_manager.save(epoch)

        self.summary_writer.flush()
        self.checkpoint_manager.save(self.config.EPOCHS + 1)
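
The loop above depends on ExceptionHandlingIterator, which is not defined in this snippet. A minimal sketch of what such a wrapper could look like (an assumption, not necessarily the project's actual implementation): it forwards iteration and skips batches whose loading raises, so a single corrupt record does not abort the whole epoch.

    class ExceptionHandlingIterator:
        """Iterates over a tf.data dataset, skipping batches that raise."""

        def __init__(self, dataset):
            self._iterator = iter(dataset)

        def __iter__(self):
            return self

        def __next__(self):
            while True:
                try:
                    return next(self._iterator)
                except StopIteration:
                    raise
                except Exception:
                    # skip the failing batch and continue with the next one
                    continue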
Example #3
    def test(self):
        """Run evaluation of the model
        Specify LOG_DIR to point to the saved checkpoint directory
        """

        if self.restore_check is None:
            raise RuntimeError(
                'restore did not succeed; please check that config.LOG_DIR is set correctly'
            )

        self.restore_check.assert_existing_objects_matched().assert_nontrivial_match()

        # Place tensors on the CPU
        with tf.device('/CPU:0'):
            dataset = Dataset()
            ds_test = dataset.get_test()

        start = time.time()
        print('Start of Testing')

        mpjpe, mpjpe_aligned, sequences = [], [], []

        total = int(self.config.NUM_TEST_SAMPLES / self.config.BATCH_SIZE)
        for image_data in tqdm(ds_test,
                               total=total,
                               position=0,
                               desc='testing'):
            image, kp3d, sequence = image_data[0], image_data[1], image_data[2]
            kp3d_mpjpe, kp3d_mpjpe_aligned = self._test_step(image, kp3d)

            mpjpe.append(kp3d_mpjpe)
            mpjpe_aligned.append(kp3d_mpjpe_aligned)
            sequences.append(sequence)

        print('Time taken for testing {} sec\n'.format(time.time() - start))

        def convert(tensor, num=None):
            if num is None:
                num = self.config.NUM_KP3D
            return tf.squeeze(tf.reshape(tf.stack(tensor), [-1, num]))

        mpjpe = convert(mpjpe)
        mpjpe_aligned = convert(mpjpe_aligned)
        sequences = convert(sequences, 1)
        result_dict = {
            "kp3d_mpjpe": mpjpe,
            "kp3d_mpjpe_aligned": mpjpe_aligned,
            "seq": sequences,
        }

        return result_dict
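
The returned result_dict holds one MPJPE value per keypoint for every test sample, plus each sample's sequence name. A hedged usage sketch that reduces it to scalar errors (tester stands for whatever object defines the test() method above; it is my placeholder, not a name from the source):

    import tensorflow as tf

    result = tester.test()  # tester is a hypothetical instance
    mean_mpjpe = tf.reduce_mean(result['kp3d_mpjpe']).numpy()
    mean_aligned = tf.reduce_mean(result['kp3d_mpjpe_aligned']).numpy()
    print('MPJPE: {:.4f}, aligned: {:.4f}'.format(mean_mpjpe, mean_aligned))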
Example #4
    def __init__(self):
        self.is_data_posted = False
        self.log = Log()  # debug log file (writes debug messages)
        self.log_data = LogData()  # sensor data log (used when no internet connection is available)
        self.api = SamsApi()  # client for the SAMS data warehouse (https://sams.science.itf.llu.lv/), used to send the data
        self.config = Config()  # Configurations (/config/config.ini)
        self.config_data = self.config.get_config_data()
        self.repost_seconds = int(self.config_data['INTERVAL']['repost_seconds'])
        self.app_wait_time = int(self.config_data['INTERVAL']['app_wait_seconds'])
        self.dataset_taken = False
        self.dataset_taken_counter = 0

        self.data = Dataset()  # collects all the data from the sensors
        self.dataset = ""
Example #5
    def __init__(self):
        self.token_handler = TokenHandler()
        self.app_config = ApplicationConfig()
        self.mic = MicrophoneHelper()
        self.dataset = Dataset()
        self.color_print = Color()
        self.sensors = []

        if self.app_config.local_config.is_dht22:
            self.sensors.append("dht22")
        if self.app_config.local_config.is_ds18b20:
            self.sensors.append("ds18b20")
        if self.app_config.local_config.is_scale:
            self.sensors.append("scale")
Example #6
    def test_dataset_smpl(self):
        config = LocalConfig()
        dataset = Dataset().get_smpl()
        for batch in dataset.take(1):
            shape = (config.BATCH_SIZE * config.ITERATIONS,
                     config.NUM_POSE_PARAMS + config.NUM_SHAPE_PARAMS)
            self.assertEqual(shape, batch.shape)

            pose = batch[0].numpy()[:config.NUM_POSE_PARAMS]
            mean = tf.reduce_mean(pose)
            expected = np.array(0.0411809, dtype=np.float32)
            self.assertAllCloseAccordingToType(expected, mean)

            shape = batch[0].numpy()[-config.NUM_SHAPE_PARAMS:]
            mean = tf.reduce_mean(shape)
            expected = np.array(0.12554605, dtype=np.float32)
            self.assertAllCloseAccordingToType(expected, mean)
Example #7
    def test_dataset_test(self):
        config = LocalConfig()
        dataset = Dataset().get_test()
        for batch in dataset.take(1):
            image = ((batch[0].numpy()[0, :, :, :] + 1) / 2 * 255).astype(np.int32)
            output = np.sum(image)
            expected = np.array(10050903, dtype=np.int32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual(config.ENCODER_INPUT_SHAPE, image.shape)

            kp3d = batch[1].numpy()[0, :, :]
            output = np.sum(kp3d)
            expected = np.array(38780.2031, dtype=np.float32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual((config.NUM_KP3D, 3), kp3d.shape)

            # check that the sequence identifier is present with the correct shape
            sequence = batch[2].numpy()[0].decode("utf-8")
            self.assertEqual('TS1', sequence)
Example #8
    def __init__(self):
        self.dataset_helper = DatasetLogHelper()  # saves and sends the dataset
        self.dataset = Dataset()  # dataset used to take the sensor data
        self.app_config = ApplicationConfig()  # configuration data (on- and offline)
        self.wifi_helper = WifiHelper()  # gets the Wi-Fi signal strength for debugging
        self.attempts = 0
        self.dwh_api = DataApi()
        self.token_handler = TokenHandler()
        self.error_helper = ErrorHelper()
        self.failed_sensor = ""
        self.handle_online_status()
        self.checker = SelfChecker()

        # send status:
        try:
            send_log(
                f'Start Application: {self.app_config.local_config.version}',
                "debug")
            send_log(f'Config Name: {self.app_config.local_config.group}',
                     "debug")
            send_log(
                f'Signal Strength: {self.wifi_helper.get_signal_strength()}',
                "debug")
            volt = self.current_volt()
            if volt:
                send_log(f'Voltage: {volt}', "debug")

            set_timezone(self.app_config.local_config.timezone)

            for file, status in self.checker.check_files().items():
                send_log(f"created file: {file}.", "warning")
            for failed_sensor in self.error_helper.get_sensors_with_errors():
                send_log(
                    f'Please check {str(failed_sensor)} and reset all errors to reactivate the sensor.',
                    "warning")
        except Exception as e:
            print(e)
Example #9
    def test_dataset_train(self):
        config = LocalConfig()
        dataset = Dataset().get_train()
        for batch in dataset.take(1):
            image = ((batch[0].numpy()[0, :, :, :] + 1) / 2 * 255).astype(
                np.int32)
            output = np.sum(image)
            expected = np.array(6991299, dtype=np.int32)
            self.assertAllCloseAccordingToType(
                expected, output)  # this can sometimes fail with output=66
            self.assertEqual(config.BATCH_SIZE, batch[0].shape[0])
            self.assertEqual(config.ENCODER_INPUT_SHAPE, batch[0].shape[1:])

            kp2d = ((batch[1].numpy()[0, :, :2] + 1) / 2 *
                    image.shape[:2]).astype(np.int32)
            output = np.sum(kp2d)
            expected = np.array(3818, dtype=np.int32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual((config.BATCH_SIZE, config.NUM_KP2D, 3),
                             batch[1].shape)

            vis = batch[1].numpy()[0, :, 2].astype(np.int32)
            output = np.sum(vis)
            expected = np.array(17, dtype=np.int32)
            self.assertAllCloseAccordingToType(expected, output)

            kp3d = batch[2].numpy()[0, :, :]
            output = np.sum(kp3d)
            expected = np.array(4.11272e-06, dtype=np.float32)
            self.assertAllCloseAccordingToType(expected, output)
            self.assertEqual((config.BATCH_SIZE, config.NUM_KP3D, 3),
                             batch[2].shape)

            # check that the has3d flag is present with the correct shape
            has3d = batch[3].numpy()[0]
            self.assertEqual(tf.constant(1, tf.int64), has3d)
            self.assertEqual((config.BATCH_SIZE, ), batch[3].shape)
Example #10
    from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 unused import

    class DatasetConfig(LocalConfig):
        # DATA_DIR = join('/', 'data', 'ssd1', 'russales', 'new_records')
        # DATASETS = ['coco'] #['lsp', 'lsp_ext', 'mpii', 'coco', 'mpii_3d', 'h36m']
        # SMPL_DATASETS = ['cmu', 'joint_lim']
        TRANS_MAX = 20

    # class Config is implemented as a singleton, initialize the subclass first!
    config = DatasetConfig()

    import tensorflow as tf

    # Place tensors on the CPU
    with tf.device('/CPU:0'):
        dataset = Dataset()
        ds_train = dataset.get_train()
        ds_smpl = dataset.get_smpl()
        ds_val = dataset.get_val()

    import matplotlib.pyplot as plt

    for images, kp2d, kp3d, has3d in ds_train.take(1):
        fig = plt.figure(figsize=(9.6, 5.4))
        image_orig = tf.image.decode_jpeg(images[0], channels=3)
        image_orig = image_orig.numpy()
        kp2d = kp2d[0].numpy()
        ax0 = fig.add_subplot(111)
        image_2d = draw_2d_on_image(image_orig, kp2d[:, :2], vis=kp2d[:, 2])
        ax0.imshow(image_2d)
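
The Axes3D import at the top is what registers matplotlib's '3d' projection, which is why it is kept despite being flagged as unused. A sketch of how the kp3d tensor from the same batch could be inspected with it (the plotting code is my assumption, not the project's own drawing helpers):

    for images, kp2d, kp3d, has3d in ds_train.take(1):
        points = kp3d[0].numpy()  # [NUM_KP3D, 3]
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        ax.scatter(points[:, 0], points[:, 1], points[:, 2])
        plt.show()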
Example #11
    def test(self, return_kps=False):
        """Run evaluation of the model
        Specify LOG_DIR to point to the saved checkpoint directory

        Args:
            return_kps: if True, also return predicted and ground-truth keypoints (default: False)
        """

        if self.restore_check is None:
            raise RuntimeError(
                'restore did not succeed; please check that config.LOG_DIR is set correctly'
            )

        if self.config.INITIALIZE_CUSTOM_REGRESSOR:
            self.restore_check.assert_nontrivial_match()
        else:
            self.restore_check.assert_existing_objects_matched().assert_nontrivial_match()

        # Place tensors on the CPU
        with tf.device('/CPU:0'):
            dataset = Dataset()
            ds_test = dataset.get_test()

        start = time.time()
        print('Start of Testing')

        mpjpe, mpjpe_aligned, sequences, kps3d_pred, kps3d_real = [], [], [], [], []

        total = int(self.config.NUM_TEST_SAMPLES / self.config.BATCH_SIZE)
        for image_data in tqdm(ds_test,
                               total=total,
                               position=0,
                               desc='testing'):
            image, kp3d, sequence = image_data[0], image_data[1], image_data[2]
            kp3d_mpjpe, kp3d_mpjpe_aligned, predict_kp3d, real_kp3d = self._test_step(
                image, kp3d, return_kps=return_kps)

            if return_kps:
                kps3d_pred.append(predict_kp3d)
                kps3d_real.append(real_kp3d)

            mpjpe.append(kp3d_mpjpe)
            mpjpe_aligned.append(kp3d_mpjpe_aligned)
            sequences.append(sequence)

        print('Time taken for testing {} sec\n'.format(time.time() - start))

        def convert(tensor, num=None, is_kp=False):
            if num is None:
                num = self.config.NUM_KP3D
            if is_kp:
                return tf.squeeze(tf.reshape(tf.stack(tensor), [-1, num, 3]))

            return tf.squeeze(tf.reshape(tf.stack(tensor), [-1, num]))

        mpjpe = convert(mpjpe)
        mpjpe_aligned = convert(mpjpe_aligned)
        sequences = convert(sequences, 1)
        result_dict = {
            "kp3d_mpjpe": mpjpe,
            "kp3d_mpjpe_aligned": mpjpe_aligned,
            "seq": sequences,
        }

        if return_kps:
            kps3d_pred = convert(kps3d_pred, is_kp=True)
            kps3d_real = convert(kps3d_real, is_kp=True)
            result_dict.update({
                'kps3d_pred': kps3d_pred,
                'kps3d_real': kps3d_real
            })

        return result_dict
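
The convert helper stacks the per-batch lists and flattens the batch dimension into one row per test sample. A small standalone check of that reshape logic (the shapes are illustrative assumptions):

    import tensorflow as tf

    # two batches of per-sample, per-keypoint errors, each of shape [2, 14]
    batches = [tf.ones([2, 14]), tf.ones([2, 14])]
    flat = tf.squeeze(tf.reshape(tf.stack(batches), [-1, 14]))
    print(flat.shape)  # (4, 14): one row per sample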
Example #12
import configparser

import numpy as np

if __name__ == '__main__':

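    # sample Chinese queries; each assignment overwrites the previous one,
    # so only the final value of text is used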
    text = '花菇历来被国人作为延年益寿的补品,是香菇中的上品,含有丰富的营养价值,可帮助调节人体新陈代谢,助消化'
    text = '冬天到了,吃什么比较好'
    text = '天气冷了吃什么比较好'
    text = '天气冷了'
    text = '小孩'
    text = '美容'
    text = '老公'

    conf = configparser.ConfigParser()
    conf.read("./conf.ini")

    ds = Dataset(conf)

    # preprocessing

    # ds.segment_data('./data/baidu_recipe.json',
    #                 './data/baidu_recipe_segment.json',
    #                 ["title", "illustration", "practice", "materials"],
    #                 True)
    # ds.ngram_data('./data/baidu_recipe_segment.json',
    #               './data/baidu_recipe_ngram.json',
    #               ["title", "illustration", "practice", "materials"])
    #
    # ds.segment_data('./data/recipe_label_data.json',
    #                 './data/recipe_segment.json',
    #                 ["title", "illustration", "practice", "materials"],
    #                 True)