def process_timestamp(self, timestamp, database_connection):

        dset = self.dset_store.get(timestamp, database_connection)

        # set claims on best matches
        self.claim_manager.clear()
        for path in self.path_manager.open_paths:
            # score candidate detections for this path with the learned model
            mset = scoring.xgboost_learning(path, dset)
            for m in mset.matches:
                self.claim_manager.add_claim(ds.MatchClaim(m[0], m[1], path))
        self.claim_manager.sort_claims()

        # allocate claims
        self.claim_manager.allocate_claims_greedy(timestamp, self.path_manager)
        self.claim_manager.clear()

        # set unsuccessful paths pending
        for path in self.path_manager.open_paths:
            if not path.has_match_at_timestamp(timestamp):
                path.add_match(ds.Match(ds.EmptyDetection(timestamp)))
                if closing.hard_closing(path):
                    self.path_manager.close_path(path)

        # open new path for every detection not already used in any path
        for d in dset.detections:
            if not d.taken:
                new_path = ds.Path(ds.Match(d))
                self.path_manager.appendPath(new_path)
                d.take()
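
A self-contained sketch of the greedy claim allocation performed above, assuming each claim is a (score, detection, path) triple and that higher scores win first; the class name, the tuple layout and the set-based bookkeeping are illustrative, not the project's actual ds.MatchClaim / ClaimManager API:

class GreedyClaimManager(object):

    def __init__(self):
        self.claims = []

    def add_claim(self, score, detection, path):
        self.claims.append((score, detection, path))

    def sort_claims(self):
        # strongest claims first
        self.claims.sort(key=lambda claim: claim[0], reverse=True)

    def allocate_claims_greedy(self):
        # walk claims from best to worst; every detection and every path
        # may win at most one claim per timestamp
        used_detections, used_paths = set(), set()
        for score, detection, path in self.claims:
            if id(detection) in used_detections or id(path) in used_paths:
                continue
            path.append(detection)  # stand-in for path.add_match(ds.Match(detection))
            used_detections.add(id(detection))
            used_paths.add(id(path))

    def clear(self):
        self.claims = []

If two paths claim the same detection, only the higher-scoring claim succeeds:

manager = GreedyClaimManager()
path_a, path_b = [], []
manager.add_claim(0.9, 'det_1', path_a)
manager.add_claim(0.8, 'det_1', path_b)  # loses: det_1 already allocated
manager.add_claim(0.5, 'det_2', path_b)
manager.sort_claims()
manager.allocate_claims_greedy()  # path_a == ['det_1'], path_b == ['det_2']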
Example #2
    def add_new_path(self):

        if self.path_manager is not None:
            # create an empty path and refresh the GUI's tree and detail views
            new_path = ds.Path(None)
            self.path_manager.add_path(new_path)
            self.build_path_tree()
            self.build_path_details([new_path])
Example #3
    def process_timestamp(self, timestamp, database_connection):

        if len(self.path_manager.open_paths) > 0:

            # build or expand graph
            self.graph.build(timestamp, database_connection)
            # traverse graph, build hypothesis
            self.graph.traverse(timestamp, self.hypothesis_manager,
                                self.dset_store, database_connection)

            # for every open path set claims on best connections to hypothesis
            for path in self.path_manager.open_paths:
                self.hypothesis_manager.build_connection_claims(
                    path, self.claim_manager, self.dset_store,
                    database_connection)

            # allocate claims
            self.claim_manager.sort_claims()
            self.claim_manager.allocate_claims_greedy()

            self.graph.remove_timestamp(
                timestamp, database_connection
            )  # remove all data from graph for this timestamp

            self.hypothesis_manager.clear()
            self.claim_manager.clear()

            # set unsuccessful paths pending
            for path in self.path_manager.open_paths:
                if not path.has_detection_at_timestamp(timestamp):
                    path.add_detection(ds.EmptyDetection(timestamp))
                    if closing.hard_closing(path):
                        self.path_manager.close_path(path)

        # open new path for every detection not already used in any path
        dset = self.dset_store.get(timestamp, database_connection)
        for d in dset.detections:
            if not d.taken:
                new_path = ds.Path(d)
                self.path_manager.appendPath(new_path)
                d.take()
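
Both process_timestamp variants call closing.hard_closing(path) after padding an unmatched path with an empty detection, but the closing rule itself is not shown here. One plausible, self-contained reading, assuming a path is abandoned once it ends in too many consecutive gaps (the max_gap threshold and the stand-in EmptyDetection class below are assumptions, not the actual closing module):

class EmptyDetection(object):
    # stand-in for ds.EmptyDetection: a placeholder for a frame with no match
    def __init__(self, timestamp):
        self.timestamp = timestamp

def hard_closing(detections, max_gap=10):
    # close the path once its last max_gap entries are all empty,
    # i.e. the tracked object has not been seen for max_gap timestamps
    trailing_gaps = 0
    for detection in reversed(detections):
        if not isinstance(detection, EmptyDetection):
            break
        trailing_gaps += 1
    return trailing_gaps >= max_gap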
Example #4
    def load_tracks(self):

        if self.dset_store is None:
            print('Error: no data folder loaded')
            return

        self.block_inputs(True)

        self.dset_store.delete_path_associations()

        self.path_manager = ds.PathManager(config.PATHS_FILE)

        if os.path.isfile(config.PATHS_FILE):

            try:

                with open(config.PATHS_FILE, 'rb') as paths_file:
                    paths_data = pickle.load(paths_file)  # avoid shadowing the built-in input()

                if self.dset_store.source != paths_data['source']:
                    print('Warning: data sources for detections and paths do not match')
                paths_input = paths_data['paths']

                self.paths_load_progress.setMaximum(len(paths_input))
                self.app.processEvents()

                for i, tag_id in enumerate(paths_input):

                    self.path_manager.paths[tag_id] = {}

                    for path_id in paths_input[tag_id]:

                        path = ds.Path(tag_id)
                        self.path_manager.paths[tag_id][path_id] = path

                        for frame, detection_data in paths_input[tag_id][
                                path_id].items():

                            timestamp = self.dset_store.get_timestamp(frame)
                            if timestamp is not None:

                                detection_id, pos_x, pos_y, readability = detection_data

                                # data point is associated with a detection from the pipeline output
                                if detection_id is not None:

                                    dset = self.dset_store.get(timestamp)

                                    if detection_id in dset.detections:
                                        detection = dset.detections[
                                            detection_id]
                                    else:
                                        print('Warning: detection_id not found; your truth file does not match your pipeline data. Please rematch!')
                                        continue

                                    # if two paths claim the same detection only the first one gets it
                                    if detection.path is None:
                                        detection.readability = readability
                                        path.add_detection(detection)

                                    # insert empty detection for every following path
                                    else:
                                        detection = ds.EmptyDetection(
                                            timestamp)
                                        detection.position = np.array(
                                            [pos_x, pos_y])
                                        detection.readability = readability
                                        path.add_detection(detection)

                                # data point is an empty detection
                                else:
                                    detection = ds.EmptyDetection(timestamp)
                                    detection.position = np.array(
                                        [pos_x, pos_y])
                                    detection.readability = readability
                                    path.add_detection(detection)

                    self.paths_load_progress.setValue(i + 1)
                    self.app.processEvents()

                self.paths_load_label.setText(
                    str(len(paths_input)) + ' paths loaded')
                self.app.processEvents()

            except Exception as error:

                print('Warning: could not load paths file: ' + str(error))

        else:

            self.paths_load_progress.setMaximum(1)
            self.paths_load_progress.setValue(1)
            self.paths_load_label.setText('will write to new file')
            self.app.processEvents()

        self.block_inputs(False)
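
The reads in load_tracks imply the on-disk layout of config.PATHS_FILE. A minimal writer producing a compatible pickle, with purely illustrative values (the file name, the source string and all IDs below are made up):

import pickle

# layout implied by the reader above:
# {'source': <detection data source>,
#  'paths': {tag_id: {path_id: {frame: (detection_id, pos_x, pos_y, readability)}}}}
paths = {
    42: {                  # tag_id
        0: {               # path_id
            # frame: (detection_id, pos_x, pos_y, readability);
            # detection_id is None for manually inserted empty detections
            100: ('d-100-7', 10.5, 20.0, 1),
            101: (None, 11.0, 20.5, 0),
        },
    },
}

with open('paths.pkl', 'wb') as paths_file:
    pickle.dump({'source': 'example_source', 'paths': paths}, paths_file)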