Example #1
    def _log_compounded_metrics(self, ATEs, REs, drift_errors):
        metric_logger = MetricLogger()

        compound_drift_errors = CompoundTranslationRotationDrift(
            self.model_name, drift_errors)
        metric_logger.log(compound_drift_errors)
        CometLogger.get_experiment().log_metric(
            "avg_translation_error_percent",
            compound_drift_errors.metrics["avg_translation_error_percent"])
        CometLogger.get_experiment().log_metric(
            "avg_rotation_error_degrees_per_meter",
            compound_drift_errors.metrics["avg_rotation_error_degrees_per_meter"])

        compound_ATE = CompoundAbsoluteTrajectoryError(self.model_name, ATEs)
        metric_logger.log(compound_ATE)
        CometLogger.get_experiment().log_metric(
            "ATE_trans_RMSE",
            compound_ATE.metrics["absolute_trajectory_error"]["ATE_trans_stats"]["rmse"])
        CometLogger.get_experiment().log_metric(
            "ATE_rot_degrees_RMSE",
            compound_ATE.metrics["absolute_trajectory_error"]["ATE_rot_stats"]["rmse"])

        compound_RE = CompoundRelativeError(self.model_name, REs)
        metric_logger.log(compound_RE)
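
The calls above read individual values back out of each compound metric's metrics dictionary before forwarding them to Comet. A minimal sketch of the interface this assumes (the class below is hypothetical; only the metrics attribute and the constructor arguments mirror the usage in the code):

# Hypothetical stand-in for a compound metric such as
# CompoundTranslationRotationDrift: it aggregates per-trajectory drift errors
# and exposes the averages through a `metrics` dict keyed by metric name.
class CompoundDriftSketch:
    def __init__(self, model_name, drift_errors):
        # drift_errors is assumed to be a list of per-trajectory dicts with
        # "translation_error_percent" and "rotation_error_degrees_per_meter".
        self.model_name = model_name
        self.metrics = {
            "avg_translation_error_percent": sum(
                e["translation_error_percent"] for e in drift_errors
            ) / len(drift_errors),
            "avg_rotation_error_degrees_per_meter": sum(
                e["rotation_error_degrees_per_meter"] for e in drift_errors
            ) / len(drift_errors),
        }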
Example #2
    def _log_matrix_poses(self, poses, poses_gt, dataset_name: str,
                          trajectory_name: str):
        """
        Logs the poses in text format, where each pose is a homogeneous
        transformation matrix:
            T00 T01 T02 T03
            T10 T11 T12 T13
            T20 T21 T22 T23
            0   0   0   1

        Each output line flattens the top three rows, row-major:
            T00 T01 T02 T03 T10 T11 T12 T13 T20 T21 T22 T23
        """
        pose_output = ""
        pose_gt_output = ""
        matrices = Geometry.poses_to_transformations_matrix(
            poses[:, 3:], poses[:, :3])
        matrices_gt = Geometry.poses_to_transformations_matrix(
            poses_gt[:, 3:], poses_gt[:, :3])

        for i, _ in enumerate(matrices):
            pose_matrix = matrices[i]
            pose_matrix_gt = matrices_gt[i]

            pose_output = pose_output + f"{pose_matrix[0][0]} {pose_matrix[0][1]} {pose_matrix[0][2]} {pose_matrix[0][3]} " \
                                        f"{pose_matrix[1][0]} {pose_matrix[1][1]} {pose_matrix[1][2]} {pose_matrix[1][3]} " \
                                        f"{pose_matrix[2][0]} {pose_matrix[2][1]} {pose_matrix[2][2]} {pose_matrix[2][3]}"

            pose_gt_output = pose_gt_output + f"{pose_matrix_gt[0][0]} {pose_matrix_gt[0][1]} {pose_matrix_gt[0][2]} {pose_matrix_gt[0][3]} " \
                                              f"{pose_matrix_gt[1][0]} {pose_matrix_gt[1][1]} {pose_matrix_gt[1][2]} {pose_matrix_gt[1][3]} " \
                                              f"{pose_matrix_gt[2][0]} {pose_matrix_gt[2][1]} {pose_matrix_gt[2][2]} {pose_matrix_gt[2][3]}"
            if i < len(poses) - 1:
                pose_output = pose_output + "\n"
                pose_gt_output = pose_gt_output + "\n"

        metadata = dict()
        metadata["title"] = "pose_output_matrix"
        metadata["dataset"] = dataset_name
        metadata["trajectory"] = trajectory_name
        metadata["model"] = self.model_name
        filename = f'{metadata["title"]}_{dataset_name}_{trajectory_name}_{metadata["model"]}.txt'
        CometLogger.get_experiment().log_asset_data(pose_output,
                                                    name=filename,
                                                    metadata=metadata)

        metadata["title"] = "pose_gt_output_matrix"
        filename = f'{metadata["title"]}_{dataset_name}_{trajectory_name}_{metadata["model"]}.txt'
        CometLogger.get_experiment().log_asset_data(pose_gt_output,
                                                    name=filename,
                                                    metadata=metadata)
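
Each output line holds the top three rows of a 4x4 homogeneous transformation flattened row-major, the same 12-values-per-line layout used by the KITTI odometry ground-truth files. A self-contained sketch of that flattening with NumPy (the example matrix is made up; Geometry.poses_to_transformations_matrix is assumed to return an array of such 4x4 matrices):

import numpy as np

# A made-up 4x4 homogeneous transform: identity rotation, translation (1, 2, 3).
pose_matrix = np.eye(4)
pose_matrix[:3, 3] = [1.0, 2.0, 3.0]

# Flatten the top three rows row-major into the 12-value text line
# that _log_matrix_poses writes for each pose.
line = " ".join(str(v) for v in pose_matrix[:3, :].flatten())
print(line)  # 1.0 0.0 0.0 1.0 0.0 1.0 0.0 2.0 0.0 0.0 1.0 3.0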
Example #3
def load_optimizer(param: Parameters, model: nn.Module) -> Optimizer:
    CometLogger.get_experiment().log_parameter("Optimizer", param.optimizer)
    CometLogger.get_experiment().log_parameter("Learning rate",
                                               param.learning_rate)

    if param.optimizer == "Adagrad":
        CometLogger.print("Using Adagrad")
        return optim.Adagrad(model.parameters(), lr=param.learning_rate)
    elif param.optimizer == "Adam":
        CometLogger.print("Using Adam Optimizer")
        return optim.Adam(model.parameters(), lr=param.learning_rate)
    elif param.optimizer == "RMSProp":
        CometLogger.print("Using RMSProp Optimizer")
        return optim.RMSprop(model.parameters(), lr=param.learning_rate)
    else:
        CometLogger.print("Optimizer {} was not implemented".format(
            param.optimizer))
        raise NotImplementedError()
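
A hedged usage sketch of load_optimizer: the fields below (optimizer, learning_rate) match what the function reads, but the Parameters stand-in is hypothetical, and the call is commented out because it also logs to the active Comet experiment:

from dataclasses import dataclass

import torch.nn as nn


@dataclass
class FakeParameters:
    # Hypothetical stand-in; only the two attributes read by load_optimizer.
    optimizer: str = "Adam"
    learning_rate: float = 1e-4


model = nn.Linear(10, 6)
params = FakeParameters()
# optimizer = load_optimizer(params, model)  # would return optim.Adam(model.parameters(), lr=1e-4)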
Example #4
    def _log_quaternion_poses(self, poses, poses_gt, dataset_name: str,
                              trajectory_name: str):
        """
        Logs the poses in text format, where the rotation is a scalar-last quaternion:
            timestamp tx ty tz qx qy qz qw
        """
        pose_output = ""
        pose_gt_output = ""

        for i, pose in enumerate(poses):
            # att.elements[[1, 2, 3, 0]] reorganizes quaternion elements
            # from scalar first w-x-y-z to scalar last x-y-z-w
            rotation_quat = Geometry.tait_bryan_rotation_to_quaternion(
                pose[:3]).elements[[1, 2, 3, 0]]
            rotation_quat_gt = Geometry.tait_bryan_rotation_to_quaternion(
                poses_gt[i][:3]).elements[[1, 2, 3, 0]]

            pose_output = pose_output + f"{i} {pose[3]} {pose[4]} {pose[5]} " \
                                        f"{rotation_quat[0]} {rotation_quat[1]} {rotation_quat[2]} {rotation_quat[3]}"
            pose_gt_output = pose_gt_output + f"{i} {poses_gt[i][3]} {poses_gt[i][4]} {poses_gt[i][5]} " \
                                              f"{rotation_quat_gt[0]} {rotation_quat_gt[1]} {rotation_quat_gt[2]} " \
                                              f"{rotation_quat_gt[3]}"
            if i < len(poses) - 1:
                pose_output = pose_output + "\n"
                pose_gt_output = pose_gt_output + "\n"

        metadata = dict()
        metadata["title"] = "pose_output_quaternion"
        metadata["dataset"] = dataset_name
        metadata["trajectory"] = trajectory_name
        metadata["model"] = self.model_name
        filename = f'{metadata["title"]}_{dataset_name}_{trajectory_name}_{metadata["model"]}.txt'
        CometLogger.get_experiment().log_asset_data(pose_output,
                                                    name=filename,
                                                    metadata=metadata)

        metadata["title"] = "pose_gt_output_quaternion"
        filename = f'{metadata["title"]}_{dataset_name}_{trajectory_name}_{metadata["model"]}.txt'
        CometLogger.get_experiment().log_asset_data(pose_gt_output,
                                                    name=filename,
                                                    metadata=metadata)
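
The indexing elements[[1, 2, 3, 0]] converts the scalar-first (w, x, y, z) layout into the scalar-last (x, y, z, w) order used in the timestamp tx ty tz qx qy qz qw lines. A small sketch of just that step, assuming Geometry.tait_bryan_rotation_to_quaternion returns a pyquaternion Quaternion (which the .elements access suggests):

import numpy as np
from pyquaternion import Quaternion

# A 90-degree rotation about the z axis, for illustration only.
q = Quaternion(axis=[0.0, 0.0, 1.0], angle=np.pi / 2)

print(q.elements)                # scalar first: [w, x, y, z]
print(q.elements[[1, 2, 3, 0]])  # scalar last:  [x, y, z, w], as written to file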
Example #5
    def run(self, epochs_number: int) -> nn.Module:
        for epoch in self._epochs(epochs_number):
            CometLogger.print("=========== Epoch {} ===========".format(epoch))
            t0 = time.time()
            custom_train_loss, train_benchmark_loss = self._train()
            custom_valid_loss, valid_benchmark_loss = self._validate()
            t1 = time.time()
            epoch_run_time = t1 - t0
            self._log_epoch(custom_train_loss, custom_valid_loss, epoch,
                            epoch_run_time, train_benchmark_loss,
                            valid_benchmark_loss)
            self.early_stopper(custom_valid_loss, self.model, self.optimizer)

            if self.early_stopper.early_stop:
                CometLogger.get_experiment().log_metric(
                    "Early stop epoch", epoch + 1)
                CometLogger.print("Early stopping")
                break

        CometLogger.print(
            "Training complete, loading the last early stopping checkpoint to memory..."
        )
        self.model.load_state_dict(self.early_stopper.load_model_checkpoint())
        return self.model
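
run only relies on a small surface of the early stopper: it is callable with the validation loss, model, and optimizer, exposes an early_stop flag, and hands back the best checkpoint's state dict. A minimal sketch of such an object, reconstructed from those calls rather than taken from the project's actual implementation:

import copy


class MinimalEarlyStopper:
    """Assumed interface: track the best validation loss and stop after
    `patience` epochs without improvement."""

    def __init__(self, patience: int = 10):
        self.patience = patience
        self.counter = 0
        self.best_loss = float("inf")
        self.early_stop = False
        self._best_state = None

    def __call__(self, valid_loss, model, optimizer):
        # The optimizer is accepted because run() passes it; a real
        # implementation would presumably checkpoint it as well.
        if valid_loss < self.best_loss:
            self.best_loss = valid_loss
            self.counter = 0
            self._best_state = copy.deepcopy(model.state_dict())
        else:
            self.counter += 1
            if self.counter >= self.patience:
                self.early_stop = True

    def load_model_checkpoint(self):
        # The returned value is fed to model.load_state_dict() in run().
        return self._best_state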
Example #6
    def run(self):
        trajectory_rotation_losses = []
        trajectory_translation_losses = []
        drift_errors = []
        ATEs = []
        REs = []

        for dataset_name, trajectory_name, dataloader in self.trajectory_dataloaders:
            dataset: AbstractSegmentDataset = dataloader.dataset
            print("testing {}, {}".format(trajectory_name, dataset_name))

            start = time.time()
            predictions, rotation_losses, translation_losses, absolute_ground_truth = self._test(
                dataloader)
            end = time.time()

            last_included_index = self._trim_trajectories(
                absolute_ground_truth[:, 3:])
            predictions = predictions[:last_included_index + 1]
            absolute_ground_truth = absolute_ground_truth[:last_included_index + 1]

            CometLogger.print(
                f"Inferred {len(predictions)} poses in {end-start} seconds.\n"
                f"Dataset fps: {dataset.framerate}, inference fps {len(predictions)/(end-start)}."
            )

            trajectory_rotation_losses.append(
                (dataset_name, trajectory_name, rotation_losses))
            trajectory_translation_losses.append(
                (dataset_name, trajectory_name, translation_losses))

            plotter = TrajectoryPlotter(trajectory_name, dataset_name,
                                        self.model_name, absolute_ground_truth,
                                        predictions)
            CometLogger.get_experiment().log_figure(
                figure=plotter.rotation_figure,
                figure_name='rotation {} {}'.format(trajectory_name,
                                                    dataset_name))

            CometLogger.get_experiment().log_figure(
                figure=plotter.position_figure,
                figure_name='translation {} {}'.format(trajectory_name,
                                                       dataset_name))

            drift, ATE, RE = self._log_metrics(absolute_ground_truth, dataset,
                                               dataset_name, predictions,
                                               trajectory_name)
            drift_errors.append(drift)
            ATEs.append(ATE)
            REs.append(RE)

            self._log_matrix_poses(predictions, absolute_ground_truth,
                                   dataset_name, trajectory_name)
            self._log_quaternion_poses(predictions, absolute_ground_truth,
                                       dataset_name, trajectory_name)

        self._log_compounded_metrics(ATEs, REs, drift_errors)

        losses_figure = self._plot_trajectory_losses(
            trajectory_rotation_losses, trajectory_translation_losses)
        CometLogger.get_experiment().log_figure(
            figure=losses_figure, figure_name="trajectory_losses")

        # compute total avg losses
        translation_loss = self._complute_total_avg_loss(
            trajectory_translation_losses)
        rotation_loss = self._complute_total_avg_loss(
            trajectory_rotation_losses)

        CometLogger.get_experiment().log_metric(
            "Total Avg Translation loss (test phase)", translation_loss)
        CometLogger.get_experiment().log_metric(
            "Total Avg Rotation loss (test phase)", rotation_loss)
Example #7
    def _log_epoch(self, custom_train_loss, custom_valid_loss, epoch,
                   epoch_run_time, train_benchmark_loss, valid_benchmark_loss):
        CometLogger.print("Epoch run time: {}".format(epoch_run_time))
        CometLogger.get_experiment().log_metric("epoch run time",
                                                epoch_run_time,
                                                epoch=epoch)
        CometLogger.get_experiment().log_metric("mean training loss",
                                                train_benchmark_loss,
                                                epoch=epoch)
        CometLogger.get_experiment().log_metric("mean validation loss",
                                                valid_benchmark_loss,
                                                epoch=epoch)
        CometLogger.get_experiment().log_metric("custom mean training loss",
                                                custom_train_loss,
                                                epoch=epoch)
        CometLogger.get_experiment().log_metric("custom mean validation loss",
                                                custom_valid_loss,
                                                epoch=epoch)
        CometLogger.print("Mean train loss: {}, Mean valid loss: {}".format(
            custom_train_loss, custom_valid_loss))
        CometLogger.get_experiment().log_metric("epoch", epoch)
        CometLogger.get_experiment().log_epoch_end(epoch_cnt=epoch)
Example #8
    def _train(self) -> tuple:
        timer_start_time = time.time()

        self.model.train()

        losses_sum = 0
        benchmark_losses_sum = 0

        for i, (input, target) in enumerate(self.train_dataloader):
            CometLogger.get_experiment().log_metric("Current batch", i + 1)
            CometLogger.get_experiment().log_metric("Total nbr of batches",
                                                    len(self.train_dataloader))

            # Only log this if we are NOT in a multiprocessing session
            if CometLogger.gpu_id is None:
                print("--> processing batch {}/{} of size {}".format(
                    i + 1, len(self.train_dataloader), len(input)))
            if cuda_is_available():
                with ThreadingTimeout(14400.0) as timeout_ctx1:
                    input = input.cuda(
                        non_blocking=self.train_dataloader.pin_memory)
                    target = target.cuda(
                        non_blocking=self.train_dataloader.pin_memory)
                if not bool(timeout_ctx1):
                    CometLogger.fatalprint(
                        'Encountered fatally long delay when moving tensors to GPUs'
                    )

            prediction = self.model.forward(input)

            with ThreadingTimeout(14400.0) as timeout_ctx3:
                if isinstance(prediction, tuple):
                    benchmark_loss = self.benchmark_MSE_loss.compute(
                        prediction[0], target)
                else:
                    benchmark_loss = self.benchmark_MSE_loss.compute(
                        prediction, target)
            if not bool(timeout_ctx3):
                CometLogger.fatalprint(
                    'Encountered fatally long delay during computation of benchmark loss'
                )

            with ThreadingTimeout(14400.0) as timeout_ctx4:
                benchmark_losses_sum += float(
                    benchmark_loss.data.cpu().numpy())
            if not bool(timeout_ctx4):
                CometLogger.fatalprint(
                    'Encountered fatally long delay during summation of benchmark losses'
                )

            with ThreadingTimeout(14400.0) as timeout_ctx5:
                loss = self.custom_loss.compute(prediction, target)
            if not bool(timeout_ctx5):
                CometLogger.fatalprint(
                    'Encountered fatally long delay during computation of the custom loss'
                )

            self._backpropagate(loss)

            with ThreadingTimeout(14400.0) as timeout_ctx6:
                losses_sum += float(loss.data.cpu().numpy())
            if not bool(timeout_ctx6):
                CometLogger.fatalprint(
                    'Encountered fatally long delay during loss addition')

        timer_end_time = time.time()

        CometLogger.get_experiment().log_metric(
            "Epoch training time", timer_end_time - timer_start_time)

        return (losses_sum / len(self.train_dataloader),
                benchmark_losses_sum / len(self.train_dataloader))
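
The ThreadingTimeout guards appear to come from the stopit package (the import is not shown above, so this is an assumption): the context manager is truthy only if its block ran to completion within the limit, which is what the if not bool(timeout_ctx...) checks test. An isolated sketch of that pattern, with slow_step as a placeholder for steps such as moving tensors to the GPU or computing a loss:

import time

from stopit import ThreadingTimeout


def slow_step():
    # Placeholder work; the training loop guards GPU transfers and loss
    # computations this way.
    time.sleep(0.1)


with ThreadingTimeout(14400.0) as timeout_ctx:
    slow_step()

if not bool(timeout_ctx):
    # _train calls CometLogger.fatalprint(...) at this point.
    raise RuntimeError("Encountered fatally long delay")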