def load_System_masks_from_clickpoints(self, path):
    """Open the system-output ClickPoints database at *path* and record
    the (stringified) timestamps of every image that carries a
    non-empty mask in ``self.System_Masks``."""
    self.system_db = DataFileExtended(path)
    # One SQL round-trip instead of iterating mask objects in Python.
    rows = self.system_db.db.execute_sql(
        "select image.timestamp from image inner join mask on mask.image_id=image.id where mask.data is not null"
    ).fetchall()
    self.System_Masks.extend(str(row[0]) for row in rows)
class SegmentationEvaluator(object):
    """Evaluate binary segmentation masks against ground truth.

    Masks are read from two ClickPoints databases (one holding
    ground-truth masks, one holding system output).  :meth:`match` pairs
    frames by image timestamp and fills per-timestamp dictionaries with
    standard binary-classification metrics.
    """

    def __init__(self):
        # Timestamps (stringified) of frames carrying a mask in each DB.
        self.System_Masks = []
        self.GT_Masks = []
        # Database handles; populated by the load_* methods.
        self.gt_db = None
        self.system_db = None
        # Per-timestamp metric dicts.  Several attributes are aliases of
        # the SAME dict object (e.g. true_negative_rate IS specificity),
        # so updating one updates both names.
        self.specificity = {}
        self.true_negative_rate = self.specificity
        self.sensitivity = {}
        self.true_positive_rate = self.sensitivity
        self.precision = {}
        self.positive_predictive_value = self.precision
        self.negative_predictive_value = {}
        self.false_positive_rate = {}
        self.fall_out = self.false_positive_rate
        self.false_negative_rate = {}
        self.false_discovery_rate = {}
        self.accuracy = {}
        self.F1_score = {}
        self.MCC = {}
        self.informedness = {}
        self.markedness = {}
        self.positive_rate = {}
        self.LeeLiu_rate = {}

    def load_GT_masks_from_clickpoints(self, path):
        """Open the ground-truth database at *path* and record the
        timestamps of all images that have a non-empty mask."""
        self.gt_db = DataFileExtended(path)
        timestamps = self.gt_db.db.execute_sql(
            "select image.timestamp from image inner join mask on mask.image_id=image.id where mask.data is not null"
        ).fetchall()
        self.GT_Masks.extend([str(t[0]) for t in timestamps])

    def load_System_masks_from_clickpoints(self, path):
        """Open the system-output database at *path* and record the
        timestamps of all images that have a non-empty mask."""
        self.system_db = DataFileExtended(path)
        timestamps = self.system_db.db.execute_sql(
            "select image.timestamp from image inner join mask on mask.image_id=image.id where mask.data is not null"
        ).fetchall()
        self.System_Masks.extend([str(t[0]) for t in timestamps])

    def match(self, gt_inverse=True, system_inverse=True):
        """Compute metrics for every timestamp present in both databases.

        Parameters
        ----------
        gt_inverse, system_inverse : bool
            If True the corresponding boolean mask is inverted before
            comparison (ClickPoints masks may encode the background as
            the painted value).

        Notes
        -----
        Metrics may contain nan/inf for frames where a denominator
        (e.g. P, N, TP+FP) is zero; NumPy warns in that case, matching
        the original behaviour.
        """
        stamps = set(self.GT_Masks).intersection(self.System_Masks)
        for stamp in stamps:
            sm = self.system_db.getMask(image=self.system_db.getImages(
                timestamp=stamp)[0]).data.astype(bool)
            if system_inverse:
                sm = ~sm
            gt = self.gt_db.getMask(image=self.gt_db.getImages(
                timestamp=stamp)[0]).data.astype(bool)
            if gt_inverse:
                gt = ~gt
            # Confusion-matrix counts, as floats so ratios below are
            # true divisions.
            P = np.sum(gt).astype(float)   # condition positive
            TP = np.sum(sm & gt).astype(float)
            FP = np.sum(sm & (~gt)).astype(float)
            N = np.sum(~gt).astype(float)  # condition negative
            TN = np.sum((~sm) & (~gt)).astype(float)
            FN = np.sum((~sm) & gt).astype(float)
            self.specificity.update({stamp: TN / N})
            self.sensitivity.update({stamp: TP / P})
            self.precision.update({stamp: TP / (TP + FP)})
            self.negative_predictive_value.update({stamp: TN / (TN + FN)})
            self.false_positive_rate.update({stamp: FP / N})
            self.false_negative_rate.update({stamp: FN / (TP + FN)})
            self.false_discovery_rate.update({stamp: FP / (TP + FP)})
            self.accuracy.update({stamp: (TP + TN) / (TP + FN + TN + FP)})
            self.F1_score.update({stamp: 2 * TP / (2 * TP + FP + FN)})
            # Matthews correlation coefficient.  BUGFIX: the denominator
            # is the SQUARE ROOT of the product of the four marginal
            # sums; the original divided by the raw product.
            self.MCC.update({
                stamp: (TP * TN - FP * FN) / np.sqrt(
                    (TP + FP) * (TP + FN) * (TN + FP) * (TN + FN))
            })
            self.informedness.update({stamp: TP / P + TN / N - 1})
            self.markedness.update(
                {stamp: TP / (TP + FP) + TN / (TN + FN) - 1})
            # Fraction of the frame the system marks as positive.
            self.positive_rate.update(
                {stamp: (TP + FP) / (sm.shape[0] * sm.shape[1])})
            self.LeeLiu_rate.update({
                stamp: (TP / P)**2 / ((TP + FP) / (sm.shape[0] * sm.shape[1]))
            })
import numpy as np
import cv2
from PenguTrack.Detectors import FlowDetector, EmperorDetector, rgb2gray
from PenguTrack.Filters import HungarianTracker, KalmanFilter
from PenguTrack.Models import BallisticWSpeed
from PenguTrack.DataFileExtended import DataFileExtended
import matplotlib.pyplot as plt
import scipy.stats as ss
# Open the rotating-huddles database (side effect: file handle held open).
db = DataFileExtended(
    r"D:\User\Alex\Documents\Promotion\RotatingHuddles\DataBases\data1980.cdb")
# NOTE(review): method name looks misspelled ("deletetOld") -- confirm it
# matches the DataFileExtended API before "fixing" it.
db.deletetOld()

# FD = FlowDetector()

object_size = 3  # Object diameter (smallest)
q = 1.  # Variability of object speed relative to object size
r = 1.  # Error of object detection relative to object size
log_prob_threshold = -20.  # Threshold for track stopping

# Physical motion model: 2-D ballistic model with a speed state.
model = BallisticWSpeed(dim=2)

# Set up Kalman filter
X = np.zeros(model.State_dim).T  # Initial Value for Position
# Q = np.diag([q * object_size * np.ones(model.Evolution_dim)])  # Prediction uncertainty
# NOTE(review): np.diag of a nested (1, 4) array returns its length-1
# diagonal, not a 4x4 matrix -- possibly np.diag(q * object_size *
# np.array([0.1, 1.0, 0.1, 1.0])) was intended; confirm downstream usage.
Q = np.diag([q * object_size * np.array([0.1, 1.0, 0.1, 1.0])
             ])  # Prediction uncertainty

# R = np.diag([r * object_size * np.ones(model.Meas_dim)])  # Measurement uncertainty
# NOTE(review): same np.diag concern as Q above.
R = np.diag([r * object_size * np.array([0.1, 1.0, 0.1, 1.0])
             ])  # Measurement uncertainty
# ---- Example #4 ----
if __name__ == '__main__':
    import my_plot
    import numpy as np
    import clickpoints
    import matplotlib.pyplot as plt
    from PenguTrack.DataFileExtended import DataFileExtended

    # Load Data from Databases
    db = DataFileExtended(
        "/home/birdflight/Desktop/PT_Test_full_n3_r7_A20_filtered.cdb")
    db2 = DataFileExtended("/home/birdflight/Desktop/252_GT_Detections.cdb")
    LogType = db.getMarkerType(name="PT_Detection_Marker")
    ImgType = db.getMarkerType(name="PT_Track_Marker")
    # Positions_Log = np.asarray([[m.x, m.y, m.image.sort_index] for m in db.getMarkers(type=LogType) if not m.text.count("inf")])
    # Collect (x, y, frame-index) triples, keeping only markers whose
    # track has more than 3 markers (filters short, likely spurious tracks).
    Positions_Img = np.asarray([[m.x, m.y, m.image.sort_index]
                                for m in db.getMarkers(type=ImgType)
                                if m.track.markers.count() > 3])

    # Do Correction for Position Data
    from CameraTransform import CameraTransform
    # NOTE(review): "angel_to_horizon" is presumably "angle_to_horizon";
    # kept as-is to match the CameraTransform keyword -- confirm.
    CT = CameraTransform(14, [17, 9], [4608, 2592],
                         observer_height=31.,
                         angel_to_horizon=(np.pi / 2. - 0.24) * 180 / np.pi)
    # Project image coordinates to world coordinates at fixed height Z.
    orth_x, orth_y, orth_z = CT.transCamToWorld(Positions_Img.T[:2], Z=0.525)

    # Calculate Histogramms
    cx = cy = 2  # bin extent in x and y
    times = np.asarray(sorted([i.timestamp for i in db.getImages()]))
    # Normalisation: per bin area and per observation duration in hours.
    scale = 1. / (cx * cy) / ((times[-1] - times[0]).seconds / 3600.)
    # NOTE(review): this call is truncated here -- the remaining
    # histogram2d arguments are not part of this fragment.
    hist, binx, biny = np.histogram2d(orth_x,
                                      orth_y,
# ---- Example #5 ----
from PenguTrack.Filters import KalmanFilter
from PenguTrack.Filters import MultiFilter
# from PenguTrack.Filters import ThreadedMultiFilter as MultiFilter
from PenguTrack.Models import VariableSpeed
from PenguTrack.Detectors import ViBeSegmentation
from PenguTrack.Detectors import Measurement as Pengu_Meas
from PenguTrack.Detectors import SimpleAreaDetector as AreaDetector
from PenguTrack.Detectors import rgb2gray
from PenguTrack.Stitchers import Heublein_Stitcher

import scipy.stats as ss

# Load Database
file_path = "/home/user/Desktop/Birdflight.cdb"
global db  # NOTE(review): no-op at module level; `global` only matters inside a function
db = DataFileExtended(file_path)

# Initialise PenguTrack
object_size = 1  # Object diameter (smallest)
object_number = 1  # Number of Objects in First Track
object_area = 3

# Initialize physical model as 2d variable speed model with 0.5 Hz frame-rate
model = VariableSpeed(1, 1, dim=2, timeconst=1.)

X = np.zeros(4).T  # Initial Value for Position
# NOTE(review): q, r and np are not defined in this fragment -- they are
# expected to come from the surrounding example; confirm before running.
Q = np.diag([q*object_size, q*object_size])  # Prediction uncertainty
R = np.diag([r*object_size, r*object_size])  # Measurement uncertainty

State_Dist = ss.multivariate_normal(cov=Q)  # Initialize Distributions for Filter
Meas_Dist = ss.multivariate_normal(cov=R)  # Initialize Distributions for Filter
# ---- Example #6 ----
from PenguTrack.DataFileExtended import DataFileExtended
from PenguTrack.Filters import KalmanFilter
from PenguTrack.Filters import MultiFilter
from PenguTrack.Models import VariableSpeed
from PenguTrack.Detectors import SiAdViBeSegmentation
from PenguTrack.Detectors import Measurement as Pengu_Meas
from PenguTrack.Detectors import SimpleAreaDetector as AreaDetector
from PenguTrack.Detectors import rgb2gray
from PenguTrack.Stitchers import Heublein_Stitcher

import scipy.stats as ss

# Load Database
file_path = "/home/alex/Masterarbeit/770_PANA/blabla.cdb"
global db  # NOTE(review): no-op at module level
db = DataFileExtended(file_path)

# Initialise PenguTrack
object_size = 0.5  # Object diameter (smallest)
penguin_height = 0.462  #0.575
penguin_width = 0.21
object_number = 300  # Number of Objects in First Track
object_area = 55

# Initialize physical model as 2d variable speed model with 0.5 Hz frame-rate
# NOTE(review): dim=3 here while X/Q/R below are sized for a 2-D state --
# confirm the intended dimensionality.
model = VariableSpeed(1, 1, dim=3, timeconst=0.5)

X = np.zeros(4).T  # Initial Value for Position
# NOTE(review): q, r and np are not defined in this fragment; they come
# from the surrounding example.
Q = np.diag([q * object_size, q * object_size])  # Prediction uncertainty
R = np.diag([r * object_size, r * object_size])  # Measurement uncertainty
# ---- Example #7 ----
    # Initialize Filter/Tracker
    # MultiKal = MultiFilter(KalmanFilter, model, np.diag(Q),
    #                        np.diag(R), meas_dist=Meas_Dist, state_dist=State_Dist)
    from PenguTrack.Filters import HungarianTracker
    # Hungarian-assignment tracker wrapping per-track Kalman filters.
    MultiKal = HungarianTracker(KalmanFilter,
                                model,
                                np.diag(Q),
                                np.diag(R),
                                meas_dist=Meas_Dist,
                                state_dist=State_Dist)
    # Tracks whose log-probability falls below this threshold are stopped.
    MultiKal.LogProbabilityThreshold = log_prob_threshold

    # Extended Clickpoints Database for usage with pengutack
    from PenguTrack.DataFileExtended import DataFileExtended
    # Open ClickPoints Database
    db = DataFileExtended("./ExampleData/cell_data.cdb")
    # Define ClickPoints Marker types; existing markers of each type are
    # deleted so the run starts from a clean slate.
    detection_marker_type = db.setMarkerType(name="Detection_Marker",
                                             color="#FF0000",
                                             style='{"scale":1.2}')
    db.deleteMarkers(type=detection_marker_type)
    track_marker_type = db.setMarkerType(name="Track_Marker",
                                         color="#00FF00",
                                         mode=db.TYPE_Track)
    db.deleteMarkers(type=track_marker_type)
    prediction_marker_type = db.setMarkerType(name="Prediction_Marker",
                                              color="#0000FF")
    db.deleteMarkers(type=prediction_marker_type)
    # Delete Old Tracks
    db.deleteTracks(type=track_marker_type)
# ---- Example #8 ----
from PenguTrack.Detectors import rgb2gray, TresholdSegmentation
from PenguTrack.Detectors import RegionPropDetector, RegionFilter, ExtendedRegionProps

from skimage.morphology import binary_closing, binary_dilation, binary_opening, binary_erosion
from skimage.morphology import disk
# Structuring element (radius-2 disk) for the morphology operations above.
SELEM = disk(2, dtype=bool)

import scipy.stats as ss

# Load Database
a_min = 75  # minimum accepted region area
a_max = np.inf  # no upper area limit
file_path = "/home/alex/Desktop/PT_Cell_T850_A%s_%s_3d.cdb" % (a_min, a_max)

global db  # NOTE(review): no-op at module level
db = DataFileExtended(file_path, "w")

# NOTE(review): input_file / input_file2 are not defined in this
# fragment; they come from the surrounding example.
db_start = DataFileExtended(input_file)
db_start2 = DataFileExtended(input_file2)
images = db_start.getImageIterator()


def getImage():
    """Fetch the next image from the module-level `images` iterator.

    Returns (None, None, None) when the iterator is exhausted.
    NOTE(review): `images.next()` is Python-2-only; on Python 3 this
    must be `next(images)` -- confirm the target interpreter.  The
    fragment appears truncated after the print, so the success-path
    return is not visible here.
    """
    try:
        im = images.next()
    except StopIteration:
        # print("Done! First!")
        return None, None, None
    fname = im.filename
    d = im.timestamp
    print(fname)
 def load_System_marker_from_clickpoints(self, path, type):
     """Load system marker positions from a ClickPoints database.

     Opens the database at *path*, keeps only markers of the given
     *type* whose image filename also exists in the ground-truth DB,
     and fills ``self.System_Markers`` with
     {ground-truth frame index: 2xK array of (x, y) columns}.

     Note: *type* shadows the builtin but is kept to preserve the
     caller-visible keyword name.
     """
     self.system_db = DataFileExtended(path)
     # Only consider images present in BOTH databases (matched by filename).
     image_filenames = set(
         img.filename for img in self.gt_db.getImages()
     ).intersection(img.filename for img in self.system_db.getImages())
     markers = np.asarray([
         [self.gt_db.getImage(filename=m.image.filename).sort_index, m.x, m.y]
         for m in self.system_db.getMarkers(type=type)
         if m.image.filename in image_filenames
     ])
     # BUGFIX: with no matching markers np.asarray([]) is 1-D and
     # markers.T[0] below raises IndexError; nothing to record then.
     if markers.size == 0:
         return
     # Group the (x, y) columns by ground-truth frame index.
     self.System_Markers.update(
         {t: markers[markers.T[0] == t].T[1:] for t in set(markers.T[0])})