Example #1
 def confidences(self):
     if 'confidence' in self.raw:
         return self.raw['confidence']
     LoggingManager.instance().warning(
         f"There are no 'confidences' for that sample {self.datasource.label}."
     )
     return None
Example #2
 def label_names(self):
     '''Converts the category numbers to their corresponding names (e.g. 0 -> 'pedestrian') and returns the list of names for all boxes in the sample.'''
     label_source_name = categories.get_source(platform_utils.parse_datasource_name(self.datasource.label)[2])
     try:
         return [categories.CATEGORIES[label_source_name][str(category_number)]['name'] for category_number in self.raw['data']['classes']]
     except KeyError:
         LoggingManager.instance().warning(f"Cannot find the category names for {label_source_name} in CATEGORIES.")
         return None
Example #3
    def raw(self):
        if self._raw is None:
            r = super(Echo, self).raw
            r['das.sample'] = self
            if r['data']['timestamps'][-1] == 0:
                try:
                    cfg = self.datasource.sensor['cfg'].get_at_timestamp(
                        self.timestamp).raw
                    if self.datasource.sensor.specs is None:
                        self.datasource.sensor.specs = banks.extract_specs(
                            lambda n: cfg[n])

                    try:
                        if self.datasource.sensor.modules_angles is None:
                            self.datasource.sensor.modules_angles = banks.extract_intrinsics_modules_angles(
                                lambda n: cfg[n])
                    except Exception:
                        LoggingManager.instance().warning(
                            "Sensor {} has no modules angles, or the modules angles could not be read from the sensor intrinsic calibration"
                            .format(self.datasource.sensor.name))

                    banks.add_timestamp_offsets(
                        r, self.datasource.sensor.name,
                        self.datasource.sensor.specs,
                        int(cfg['ID_ACCUMULATION_EXP']),
                        int(cfg['ID_OVERSAMPLING_EXP']),
                        int(cfg['ID_BASE_POINT_COUNT']))
                except Exception:
                    # config lookup failed; keep the raw sample without timestamp offsets
                    pass

            self._raw = r

        return self._raw
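
A note on Example #3: it follows a lazy-cache pattern, where the raw payload is built once, patched, and memoized in self._raw. A minimal standalone sketch of the same pattern (hypothetical names, not from the source):

    class CachedSample:
        def __init__(self):
            self._raw = None

        @property
        def raw(self):
            # compute once, then always return the memoized value
            if self._raw is None:
                self._raw = self._compute_raw()
            return self._raw

        def _compute_raw(self):
            return {'data': {'timestamps': []}}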
Example #4
    def _check_timestamps_consistency(self):
        nts, nfiles = self._get_nb_timestamps_and_files_or_rows()

        if nfiles != nts:
            n = min(nts, nfiles)
            LoggingManager.instance().warning(
                'The numbers of timestamps and data files differ '
                'for sensor %s (nfiles: %d != nts: %d). '
                'Keeping the first %d timestamps and files' %
                (self.path, nfiles, nts, n))
            self.timestamps = self.timestamps[:n]

            if (self.nb_data_per_pkl_file == 1):
                self.files = self.files[:n]
            else:
                if n % self.nb_data_per_pkl_file == 0:
                    self.files = self.files[:n // self.nb_data_per_pkl_file]
                else:
                    # keep one trailing file that will only be partially used
                    self.files = self.files[:n // self.nb_data_per_pkl_file + 1]

            nts, nfiles = self._get_nb_timestamps_and_files_or_rows()
            assert nfiles == nts
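
A worked instance of the trimming arithmetic in Example #4 (made-up numbers): with n = 10 kept timestamps and nb_data_per_pkl_file = 4, 10 % 4 != 0, so 10 // 4 + 1 = 3 files are kept and the last file is only partially used.

    # Made-up illustration of the file-trimming arithmetic above.
    n, nb_data_per_pkl_file = 10, 4
    if n % nb_data_per_pkl_file == 0:
        kept = n // nb_data_per_pkl_file
    else:
        kept = n // nb_data_per_pkl_file + 1  # last file partially used
    assert kept == 3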
Example #5
 def attributes(self):
     if 'attributes' in self.raw:
         return self.raw['attributes']
     LoggingManager.instance().warning(
         f"There are no 'attributes' for that sample {self.datasource.label}."
     )
     return None
Example #6
    def _load_offline_datasources(self, name: str):

        archives = glob.glob(os.path.join(self.platform.dataset, name + '_*'))
        for archive in [os.path.basename(f) for f in archives]:

            if archive.endswith('.zip'):
                # strip the .zip extension and extract the 'datasource' suffix:
                ds_name = os.path.splitext(archive)[0].split('_')[-1]
                try:
                    ds = ZipFileSource(
                        os.path.join(self.platform.dataset, archive))
                    self._sensors[name].add_datasource(
                        ds,
                        ds_name,
                        cache_size=self.platform.default_cache_size)
                except Exception:
                    LoggingManager.instance().warning(
                        f'Zip file for {name}_{ds_name} could not be loaded.')
                    continue

            else:
                ds_name = archive.split('_')[-1]
                try:
                    ds = DirSource(os.path.join(self.platform.dataset,
                                                archive))
                    self._sensors[name].add_datasource(
                        ds,
                        ds_name,
                        cache_size=self.platform.default_cache_size)
                except Exception:
                    LoggingManager.instance().warning(
                        f'Data directory for {name}_{ds_name} could not be loaded.'
                    )
                    continue
Example #7
    def _add_imu_quality_data(self):
        try:
            metadata_entry = 'IMU_step_ratio'
            ratio_ = vif.get_trajectory_step_ratio(
                self.synchronized,
                'flir_bfc_img',
                'sbgekinox_bcc_navposvel',
                traj_min_epsilon_precision=1e-3)

            self._add_column(metadata_entry)
            self.metadata_dirty[metadata_entry][self.frame_selection] = ratio_[
                self.frame_selection]
        except Exception as e:
            LoggingManager.instance().warning(str(e))

        try:
            metadata_entry = 'IMU_standard_score'
            ratio_ = vif.get_trajectory_standard_score(
                self.synchronized,
                'flir_bfc_img',
                'sbgekinox_bcc_navposvel',
                traj_seq_memory=200)

            self._add_column(metadata_entry)
            self.metadata_dirty[metadata_entry][self.frame_selection] = ratio_[
                self.frame_selection]
        except Exception as e:
            LoggingManager.instance().warning(str(e))

        self.window.isDirty = True
        self._update()
Example #8
    def apply_temperature_correction(self, timestamp, distances):
        """Applies temperature-related distance corrections"""
        if (self.temperature_slope is None
                and self.temperature_reference is None):
            self.temperature_compensation = MotorLidar.TemperatureCompensation.Deactivated
            print(
                'MotorLidar Log: Temperature compensation mode is deactivated')
            return distances

        offsets_ = 0
        try:
            temperature = self.datasources['temp'].get_at_timestamp(
                timestamp).raw['data']
            k = len(self.temperature_coeffs) - 1
            # evaluate the non-constant polynomial terms: sum of coeffs[i] * T^(k - i)
            offsets_ -= np.sum([
                temperature**(k - i) * self.temperature_coeffs[i]
                for i in range(k)
            ])
        except Exception as e:
            temperature = None
            LoggingManager.instance().warning(
                'Failed to apply the temperature for the distance correction. '
                'Error: {}'.format(str(e)))
        offsets_ -= self.temperature_coeffs[-1]  # constant term, applied even without a temperature reading

        #print(f'Log: 3D points for {self.name} are corrected with temperature T={temperature}: global offset is {offsets_}')
        return distances + offsets_
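
The loop plus the trailing constant term in Example #8 amount to evaluating the full correction polynomial at the measured temperature, i.e. the total offset equals -np.polyval(temperature_coeffs, temperature). A quick check with made-up coefficients:

    import numpy as np

    coeffs = np.array([0.001, -0.05, 0.3])  # highest power first, made-up values
    T = 42.0
    k = len(coeffs) - 1
    offset = -np.sum([T**(k - i) * coeffs[i] for i in range(k)]) - coeffs[-1]
    assert np.isclose(offset, -np.polyval(coeffs, T))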
Example #9
 def load_specs_from_cfg(self):
     try:
         cfg = self['cfg'][0].raw
         self.specs = banks.extract_specs(lambda n: cfg[n])
     except Exception:
         LoggingManager.instance().warning(
             "Unable to read the specs from the sensor {}".format(
                 self.name))
Example #10
 def does_path_exist(self, silent=True):
     exists = self.src_sensor is not None and self.dst_sensor is not None
     if not exists and not silent:
         LoggingManager.instance().warning(
             f"Undefined extrinsics path: "
             f"{'undefined sensor' if self.src_sensor is None else self.src_sensor.name} -> "
             f"{'undefined sensor' if self.dst_sensor is None else self.dst_sensor.name}"
         )
     return exists
Example #11
def opengl_error_check(cf, prefix=""):  # pass currentframe()
    error = GL.glGetError()
    if error != GL.GL_NO_ERROR:
        frameinfo = getframeinfo(cf)
        LoggingManager.instance().warning(
            f"{prefix} @ line {frameinfo.lineno} : OpenGL error {error}")
        return True
    return False
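
A usage sketch for Example #11 (assumes an active OpenGL context; "draw_scene" is a hypothetical label):

    from inspect import currentframe

    # ... issue some GL.* calls here ...
    if opengl_error_check(currentframe(), prefix="draw_scene"):
        pass  # an OpenGL error was logged; abort or recover here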
Example #12
    def get_files(self):

        files = []

        timestamps = None
        time_of_issues = None
        nb_data_per_pkl_file = 1

        for fullpath in glob.glob(f'{self.path}/*'):
            name = fullpath.split('/')[-1]
            if self.pattern is None:
                match, self.pattern = try_all_patterns(name)
            else:
                match = re.match(self.pattern, name)
            if match:
                groups = match.groups()
                if self.sort and not groups:
                    raise ValueError(
                        'sorting requested but the filename pattern has no capture groups')
                if self.sort and groups:
                    sample = (int(groups[0]), fullpath)
                else:
                    sample = fullpath
                files.append(sample)

            # read config for multiple rows per .pkl file (high fps sensors)
            elif (name == Constants.CONFIG_YML_PATTERN):
                with open(fullpath) as f:
                    data_yaml = yaml.safe_load(f)
                    nb_data_per_pkl_file = data_yaml['nb_vectors_in_pkl']

            # read timestamps
            elif (name == Constants.TIMESTAMPS_CSV_PATTERN):
                with open(fullpath) as f:
                    sensor_ts = pd.read_csv(f,
                                            delimiter=" ",
                                            dtype='u8',
                                            header=None).values

                    timestamps = sensor_ts[:, 0]
                    # check that timestamps never decrease; for IMU data (more than
                    # one row per pkl file) this check is incomplete -> to improve
                    if len(timestamps) > 2 and (np.min(
                            np.diff(timestamps.astype(np.int64))) < 0):
                        LoggingManager.instance().warning(
                            'Timestamps are not sorted in increasing order for datasource file {}'
                            .format(self.path))

                    if sensor_ts.shape[1] > 1:
                        time_of_issues = sensor_ts[:, 1]
                    else:
                        time_of_issues = timestamps

        if self.sort:
            files.sort()
            files = [s[1] for s in files]

        return files, time_of_issues, timestamps, nb_data_per_pkl_file
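
For reference, the timestamps file read in Example #12 is space-delimited with no header and one or two unsigned-integer columns (timestamp, and optionally time of issue). A self-contained sketch of that parsing step with made-up rows:

    import io

    import numpy as np
    import pandas as pd

    fake_csv = io.StringIO("1000 990\n2000 1990\n3000 2990\n")
    sensor_ts = pd.read_csv(fake_csv, delimiter=" ", dtype='u8', header=None).values
    timestamps = sensor_ts[:, 0]       # array([1000, 2000, 3000], dtype=uint64)
    time_of_issues = sensor_ts[:, 1]   # second column is present here
    assert np.min(np.diff(timestamps.astype(np.int64))) > 0  # increasing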
Example #13
def get_sampling_ordering_flat(v, h):

    global _ordering_cache
    if (v, h) not in _ordering_cache:
        if v in [8, 16, 64, 128, 172, 256, 312]:
            if platform.system() == "Windows":
                ctypes.cdll.LoadLibrary(
                    os.path.join(os.path.dirname(__file__),
                                 "leddar_utils_lcax.dll"))
                lca_math = ctypes.cdll.leddar_utils_lcax
            else:
                ctypes.cdll.LoadLibrary(
                    os.path.join(os.path.dirname(__file__),
                                 "libleddar_utils_lcax.so"))
                lca_math = ctypes.CDLL("libleddar_utils_lcax.so")

            lca_math.MATH_GenIndexMap(v, h)

            fMATH_CopyMapTrc2Mem = lca_math.MATH_CopyMapTrc2Mem
            fMATH_CopyMapTrc2Mem.restype = None
            fMATH_CopyMapTrc2Mem.argtypes = [
                ctypes.c_size_t,
                np.ctypeslib.ndpointer(ctypes.c_uint16, flags="C_CONTIGUOUS"),
                ctypes.c_bool
            ]

            sampled_to_ordered = np.zeros((v * h), dtype=np.uint16, order='C')
            fMATH_CopyMapTrc2Mem(v * h, sampled_to_ordered, False)

            sampled_to_ordered_inv = np.zeros((v * h),
                                              dtype=np.uint16,
                                              order='C')
            fMATH_CopyMapTrc2Mem(v * h, sampled_to_ordered_inv, True)

            fMATH_CopyMapMem2Trc = lca_math.MATH_CopyMapMem2Trc
            fMATH_CopyMapMem2Trc.restype = None
            fMATH_CopyMapMem2Trc.argtypes = [
                ctypes.c_size_t,
                np.ctypeslib.ndpointer(ctypes.c_uint16, flags="C_CONTIGUOUS"),
                ctypes.c_bool
            ]

            ordered_to_sampled = np.zeros((v * h), dtype=np.uint16, order='C')
            fMATH_CopyMapMem2Trc(v * h, ordered_to_sampled, False)
            ordered_to_sampled_inv = np.zeros((v * h),
                                              dtype=np.uint16,
                                              order='C')
            fMATH_CopyMapMem2Trc(v * h, ordered_to_sampled_inv, True)  # inverse map, mirroring the Trc2Mem calls above
            _ordering_cache[(v, h)] = (ordered_to_sampled, sampled_to_ordered,
                                       ordered_to_sampled_inv,
                                       sampled_to_ordered_inv)
        else:
            LoggingManager.instance().warning(
                "Unsupported vertical resolution v={}".format(v))
            return None, None, None, None  # match the arity of the cached tuple

    return _ordering_cache[(v, h)]
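
A usage sketch for Example #13, assuming the leddar_utils_lcax shared library ships next to the module as the loader above expects (this will not run without it):

    # Fetch the four flat uint16 LUTs for a hypothetical 64 x 256 sensor.
    luts = get_sampling_ordering_flat(64, 256)
    if luts[0] is not None:
        ordered_to_sampled, sampled_to_ordered, o2s_inv, s2o_inv = luts
        # e.g. reorder a flat acquisition buffer of length v * h:
        # ordered = raw_flat[sampled_to_ordered]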
Example #14
 def load_static_noise_from_cfg(self):
     try:
         cfg = self['cfg'][0].raw
         self.static_noise = banks.extract_intrinsics_static_noise(
             lambda n: cfg[n])
     except Exception:
         self.static_noise = 0
         LoggingManager.instance().warning(
             "Unable to read the static noise from the sensor {}".format(
                 self.name))
Example #15
    def _get_orientation_lut(self):
        if self.orientation is not None:
            orientation_f4 = self.orientation.astype('f4')
            lut = Sample.__goc_lut()
            key = orientation_f4.tobytes()  # tostring() is a deprecated alias of tobytes()
            if key in lut:
                return lut[key]

            LoggingManager.instance().warning(
                'orientation {} could not be mapped to any image transform'.
                format(self.orientation))
        return None
Example #16
 def distortion_coeffs(self):
     """np.ndarray: Nx1 distortion coefficient, refer to opencv documentation 
     
     See also: https://docs.opencv.org/4.0.0/d9/d0c/group__calib3d.html#ga3207604e4b1a1758aa66acb6ed5aa65d
     """
     try:
         k = self.intrinsics['distortion']
     except (KeyError, TypeError):
         LoggingManager.instance().warning(
             f'Distortion coefficients not found for {self.name}.')
         k = np.zeros((5, 1))
     return k
Example #17
    def create(cls, recordable, datasource, platform, synchronized, video_fps=None):
        if video_fps is not None:
            hz = video_fps
        else:
            ts = platform[datasource].timestamps
            hz = 1e6/np.mean(ts[1:] - ts[:-1])

            if hz < 1 or hz > 100:
                hz_ = max(1, min(hz, 100))  # clamp in case the timestamp units are wrong
                LoggingManager.instance().warning(f"[For datasource {datasource}] Clipping video framerate to {hz_} fps (was {hz} fps)")
                hz = hz_

        return VideoRecorder(cls.__create_key, hz, recordable, datasource)
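
The fallback rate estimate in Example #17 assumes microsecond timestamps: hz = 1e6 divided by the mean inter-frame gap. A quick check with made-up values:

    import numpy as np

    ts = np.array([0, 100_000, 200_000, 300_000])  # frames 0.1 s apart, in us
    hz = 1e6 / np.mean(ts[1:] - ts[:-1])
    assert hz == 10.0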
Example #18
 def load_time_base_delays_from_cfg(self):
     try:
         cfg = self['cfg'][0].raw
         self.time_base_delays = banks.extract_intrinsics_timebase_delays(
             lambda n: cfg[n])
     except Exception:
         self.time_base_delays = 0 if 'ftrr' not in self else {
             'high': 0,
             'low': 0
         }
         LoggingManager.instance().warning(
             "Unable to read the time base delays from the sensor {}".
             format(self.name))
Example #19
    def add_virtual_datasources(self, virtual_datasources_config):
        """Add virtual datasources based upon the provided configuration dictionnary
        
            Args:
                -virtual_datasources_config (dict): Each key must correspond to a key available in
                    VIRTUAL_DATASOURCE_FACTORY (see api/datasources/virtual_datasources/__init__.py).
                    Under each key, a dictionnary must be provided with the parameters necessary
                    to create the corresponding virtual datasource. These parameters always contain at 
                    least the 'reference_sensor' and 'dependencies' entries. Other entries depend upon
                    the specific virtual datasource type.

                    For exemple:
                        virtual_datasources_config = {
                            'echoes_from_traces': {
                                'reference_sensor': 'pixell_bfc',
                                'dependencies': ['pixell_bfc_ftrr'],
                                'nb_detections_max': 3,
                            }
                        }

            Note: To create multiple virtual datasources of the same type, you can do something like:
                virtual_datasources_config = {
                    'echoes_from_traces_bfc': {'reference_sensor': 'pixell_bfc', 'dependencies': ['pixell_bfc_ftrr']},
                    'echoes_from_traces_tfc': {'reference_sensor': 'pixell_tfc', 'dependencies': ['pixell_tfc_ftrr']}
                }
        """

        for virtual_ds_name in virtual_datasources_config:
            args = virtual_datasources_config[virtual_ds_name]

            # If there are multiple instances of the same VirtualDatasource class, their keys have to
            # differ in the config file, e.g. by appending a unique id: virtual_ds_name -> virtual_ds_name_id
            if hasattr(virtual_datasources,
                       virtual_ds_name[:virtual_ds_name.rfind('_')]):
                virtual_ds_name = virtual_ds_name[:virtual_ds_name.rfind('_')]

            if hasattr(virtual_datasources, virtual_ds_name):
                try:
                    virtual_datasource = virtual_datasources.VIRTUAL_DATASOURCE_FACTORY[
                        virtual_ds_name](**args)
                    self[virtual_datasource.reference_sensor].add_datasource(
                        virtual_datasource, virtual_datasource.ds_type)
                except Exception:
                    LoggingManager.instance().warning(
                        f"The virtual datasource {virtual_ds_name} could not be added."
                    )
            else:
                LoggingManager.instance().warning(
                    f"The virtual datasource {virtual_ds_name} does not exist."
                )
Example #20
                def f_TIMESTAMPS_CSV_PATTERN(archive):
                    with archive.open(name) as stream:
                        sensor_ts = pd.read_csv(stream, delimiter=" ", dtype='u8', header=None).values

                        timestamps = sensor_ts[:,0]
                        # check that timestamps never decrease; for IMU data (more than one row per pkl file) this check is incomplete -> to improve
                        if len(timestamps) > 2 and (np.min(np.diff(timestamps.astype(np.int64))) < 0):
                            LoggingManager.instance().warning('Timestamps are not sorted in increasing order for datasource file {}'.format(self.path))

                        if sensor_ts.shape[1] > 1:
                            time_of_issues = sensor_ts[:,1]
                        else:
                            time_of_issues = timestamps
                        return time_of_issues, timestamps
Example #21
    def __init__(self, window, synchronized):
        super(MetadataWindow, self).__init__(window, synchronized.platform)
        self.synchronized = synchronized
        self.backend = self.window.findChild(QObject, "figure")
        self.ax = self.backend.getFigure().add_subplot(111)

        if self.platform.metadata is not None:
            if len(self.platform.metadata) != len(self.synchronized):
                LoggingManager.instance().warning(
                    'The current synchronized platform has a different number of frames '
                    'than what is contained in the metadata file.'
                )
                self.metadata_dirty = pd.DataFrame(
                    index=range(len(self.synchronized)))
            else:
                self.metadata_dirty = copy.deepcopy(self.platform.metadata)
        else:
            self.metadata_dirty = pd.DataFrame(
                index=range(len(self.synchronized)))

        self.window.columnNames = list(self.metadata_dirty.columns)
        self.window.showColumn = {c: False for c in self.window.columnNames}

        #Dummy table to avoid a few warnings
        self.table_model = DataFrameModel(self.metadata_dirty)

        with open(
                f'{os.path.dirname(os.path.abspath(__file__))}/../metadata.yml',
                'r') as f:
            self.standard_entries = yaml.safe_load(f)

        self.window.entries.model = list(self.standard_entries.keys())
        self.window.entries.currentIndex = 0
        self.window.entryDescription.text = self.get_entry_description()
        self.window.input.model = list(
            self.standard_entries[self.get_entry()]['values'])
        self.window.input.currentIndex = 0

        self._select_all_frames()
        self.last_cursor_value = int(self.window.playerCursor)
        self.window.isDirty = False

        self.window.addSyncQualityData.visible = CHECK_SYNC_AVAILABLE
        self.window.addIMUQualityData.visible = (
            'sbgekinox_bcc_navposvel' in self.synchronized.platform.datasource_names())
        self.window.addObjectQuantityData.visible = (
            'flir_bfc_box2d-detectron-cyl' in self.synchronized.platform.datasource_names())
Example #22
    def apply_temperature_correction(self, timestamp, indices, distances):
        """Applies temperature-related distance corrections"""
        if (self.temperature_slope is not None
                and self.temperature_reference is not None):
            try:
                temp = self.get_temperature_at(timestamp)
            except Exception as e:
                LoggingManager.instance().warning(
                    'Failed to apply temperature correction. '
                    'Error: {}'.format(str(e)))
                return distances

            temp_offset = self.temperature_slope * (temp -
                                                    self.temperature_reference)
            return distances + temp_offset

        return distances
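
The correction in Example #22 is a simple linear model around the reference temperature, offset = slope * (T - T_ref). A worked instance with made-up parameters:

    # Made-up parameters: 2 mm of drift per degree above a 25 degC reference.
    temperature_slope, temperature_reference = 0.002, 25.0
    temp = 35.0
    temp_offset = temperature_slope * (temp - temperature_reference)
    assert abs(temp_offset - 0.02) < 1e-12  # 2 cm added to every distance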
Example #23
 def get_synchronized(self, dataset_index, progress_bar=False):
     if self.cached_synchronized[dataset_index] is None:
         try:
             pf = Platform(self.datasets[dataset_index],
                           include=self.include,
                           ignore=self.ignore,
                           progress_bar=progress_bar,
                           default_cache_size=0)
             if self.virtual_datasources_config is not None:
                 pf.add_virtual_datasources(self.virtual_datasources_config)
             sync = pf.synchronized(self.sync_labels, self.interp_labels,
                                    self.tolerance_us)
             self.cached_synchronized[dataset_index] = sync
         except Exception:
             LoggingManager.instance().warning(
                 f"There is an issue with the dataset {self.datasets[dataset_index]}."
             )
     return self.cached_synchronized[dataset_index]
Example #24
    def load_intrinsics(self, intrinsics_config: str):
        """Looks for a pickle file containing intrinsics information for this sensor, e.g. 'eagle_tfc.pkl'

        Args:
            intrinsics_config: path to folder containing this sensor's intrinsics pickle file, 
            (absolute or relative to dataset path), e.g. '/nas/cam_intrinsics' or 'cam_intrinsics'
        """

        paths = glob.glob(
            os.path.join(self.pf.try_absolute_or_relative(intrinsics_config),
                         '{}*results.pkl'.format(self.name)))
        if paths:
            path = paths[0]
            if len(paths) > 1:
                LoggingManager.instance().warning(
                    'more than one intrinsics file found, using {}'.format(path))
            with open(path, 'rb') as f:
                self.intrinsics = pickle.load(f)
Example #25
    def _preload_lenghts(self):

        self.lenghts = []
        for dataset_index in tqdm.tqdm(range(len(self.datasets)),
                                       'Grouping synchronized platforms'):
            try:
                if self.preload:
                    length = self._get_dataset_lenght(dataset_index)
                else:
                    length = self._get_dataset_lenght_fast(dataset_index)
            except Exception:
                length = 0
                LoggingManager.instance().warning(
                    f"The dataset {self.datasets[dataset_index]} could not be added to the SynchronizedGroup."
                )

            self.lenghts.append(length)

        self.cumsum_lenghts = np.cumsum([0] + self.lenghts[:-1])
Example #26
 def camera_matrix(self):
     """np.ndarray: the 3x3 intrinsics matrix
     
     See also: https://docs.opencv.org/4.0.0/d9/d0c/group__calib3d.html#ga3207604e4b1a1758aa66acb6ed5aa65d
     """
     try:
         matrix = self.intrinsics['matrix']
     except (KeyError, TypeError):
         LoggingManager.instance().warning(
             f'Intrinsic matrix of {self.name} not found. Trying to make a generic one.'
         )
         h = self.yml['configurations']['img']['Width']   # horizontal resolution (px)
         v = self.yml['configurations']['img']['Height']  # vertical resolution (px)
         h_fov = self.yml['configurations']['img']['h_fov']  # degrees
         matrix = np.identity(3)
         matrix[0, 2] = h / 2
         matrix[1, 2] = v / 2
         # focal length derived from the horizontal field of view
         matrix[0, 0] = matrix[1, 1] = h / (2 * np.tan(h_fov * np.pi / 360.0))
     return matrix
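
The generic fallback in Example #26 places the principal point at the image center and derives the focal length from the horizontal field of view, f = width / (2 * tan(h_fov / 2)) with h_fov in degrees. A made-up check:

    import numpy as np

    w, v, h_fov = 1440, 1080, 90.0  # made-up width, height (px) and FOV (deg)
    f = w / (2 * np.tan(h_fov * np.pi / 360.0))
    assert np.isclose(f, 720.0)  # tan(45 deg) == 1, so f == w / 2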
Example #27
    def compute_iou(self, box, return_max=False, map2yaw=None):
        """Compute the iou score between all the elements of self and of box.
            
            Return a matrix len(self), len(box) when row,col are indexed in the same order as self, box.

            If return_max=True: return only a single number for each element of self (the max value).

            Important note: By default the computation is performed in the sbg ref where only one angle (yaw) is not zero, unless
            map2yaw is provided (a callable) which brings all the boxes in a referential where only one rotation (yaw).

        """
        if map2yaw is not None:
            box0 = map2yaw(self)
            box1 = map2yaw(box)
        else:
            try:  # must find either sbg or ENU
                referential = platform.referential_name('sbgekinox_bcc')
                tf_TargetRef_from_Local = np.copy(
                    self.datasource.sensor.map_to(referential))
                tf_TargetRef_from_Local[:3, :3] = (
                    tf_TargetRef_from_Local[:3, :3] @ self.orientation)

            except Exception:
                LoggingManager.instance().warning(
                    'IoU computation, falling back to ENU system transformation.'
                )
                tf_TargetRef_from_Local = np.eye(4)
                tf_TargetRef_from_Local[:3, :3] = np.array(
                    [[0, 0, 1], [-1, 0, 0], [0, -1, 0]], dtype=float).T
            box0 = self._mapto(tf=tf_TargetRef_from_Local)
            box1 = box._mapto(tf=tf_TargetRef_from_Local)

        Z0 = [box0['c'], box0['d'], 'z', box0['r'][:, 2]]
        Z1 = [box1['c'], box1['d'], 'z', box1['r'][:, 2]]
        matiou = IoU3d.matrixIoU(Z0=Z0, Z1=Z1)
        if return_max:
            return np.max(matiou, axis=1)
        else:
            return matiou
Example #28
    def __init__(self, pf: 'Platform', yml: dict):
        """Constructor

           Args:
            pf: the platform that holds this Sensors instance
            yml: the YAML database
        """
        self.platform = pf
        self._sensors = {}
        self._ordered_names = []
        self._egomotion_provider = None

        yml_sensor_items = [
            s for s in yml.items() if s[0] not in
            ['ignore', 'virtual_datasources', 'synchronization']
        ]
        yml_items_tqdm = tqdm.tqdm(
            yml_sensor_items, 'Loading sensors'
        ) if self.platform.progress_bar else yml_sensor_items
        for name, value in yml_items_tqdm:

            sensor_type, _ = platform_utils.parse_sensor_name(name)
            if sensor_type in SENSOR_FACTORY:
                self._sensors[name] = SENSOR_FACTORY[sensor_type](
                    name, self.platform)
            else:
                self._sensors[name] = Sensor(name, self.platform)
            self._ordered_names.append(name)

            self._load_offline_datasources(name)

            if 'orientation' in value:
                m = np.array(value['orientation'], dtype='f4')
                if m.shape != (3, 3):
                    LoggingManager.instance().warning(
                        'Ignoring orientation for sensor {}: {} is '
                        'not a 3 x 3 matrix'.format(name, str(m)))
                else:
                    self._sensors[name].orientation = m

            if 'intrinsics' in value:
                self._load_intrinsics(name, value['intrinsics'])

            if 'extrinsics' in value:
                self._load_extrinsics(name, value['extrinsics'])

            provider = None
            try:
                provider = self._sensors[name].create_egomotion_provider()
            except Exception:
                LoggingManager.instance().warning(
                    f"The 'egomotion_provider' for sensor name {name} could not be created."
                )

            if provider is not None:
                if self._egomotion_provider is not None:
                    LoggingManager.instance().warning(
                        f"Another 'egomotion_provider' found for sensor name {name}, ignoring it."
                    )
                else:
                    self._egomotion_provider = provider
Example #29
def amplitudes_to_color(amplitudes, power=1.0, mask=None, log_normalize=False):

    cm = plt.get_cmap('viridis')
    if mask is None:
        mask = np.ones(amplitudes.shape, dtype=bool)  # np.bool is deprecated

    amplitudes = np.power(amplitudes, power)
    masked_amps = amplitudes[mask]
    if masked_amps.size == 0:
        LoggingManager.instance().warning('Provided mask is empty')
        dmin, dmax = amplitudes.min(), amplitudes.max()
    else:
        dmin, dmax = masked_amps.min(), masked_amps.max()

    if log_normalize:
        norm = matplotlib.colors.LogNorm(dmin, dmax)
    else:
        norm = matplotlib.colors.Normalize(dmin, dmax)

    colors = norm(amplitudes)
    colors = cm(colors)
    colors = (colors[:, :3] * 255).astype('u1')

    return colors
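
A usage sketch for Example #29 with made-up amplitudes; the function returns one RGB uint8 triple per input value:

    import numpy as np

    amps = np.array([0.1, 1.0, 10.0, 100.0])
    colors = amplitudes_to_color(amps, log_normalize=True)
    assert colors.shape == (4, 3) and colors.dtype == np.uint8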
Example #30
    def get_corrected_projection_data(self,
                                      timestamp: Union[int, float],
                                      cache: dict,
                                      type: str = 'directions'):
        """ Returns temperature compensated projections directions or angles

        Args:
            timestamp:  the timestamp at which the sample is needed (use to obtain the right temperature)
            cache:      the cache to search in (obtained by calling LCAx.cache(...))
            type:       the type of directions (e.g) 'directions' or 'quad_directions'. 
                        **Must be present in the cache**

        Returns:
            the compensated directions if any, or the uncompensated direction otherwise
        """

        default_directions = cache[type]

        if self.mirror_temp_compensation is not None:
            try:
                temp = self.get_temperature_at(timestamp)
            except Exception as e:
                LoggingManager.instance().warning(
                    f"Failed to apply temperature correction. Error: {str(e)}")
                return default_directions

            # clamp to the compensated range, then quantize to 0.5 degree steps
            temp = min(max(temp, self.mirror_temp_compensation['t_min']), 60)
            temp_floor = np.floor(temp)
            if (temp - temp_floor) >= 0.5:
                temp = temp_floor + 0.5
            else:
                temp = temp_floor

            return cache[f'{type}_temp_lut'][temp]

        return default_directions
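
The clamp-and-quantize step in Example #30 snaps the temperature onto the 0.5 degree grid that keys the lookup table. A made-up instance:

    import numpy as np

    temp = 37.74
    temp_floor = np.floor(temp)                                # 37.0
    temp = temp_floor + 0.5 if (temp - temp_floor) >= 0.5 else temp_floor
    assert temp == 37.5  # keys into cache['directions_temp_lut'][37.5]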