def load_System_masks_from_clickpoints(self, path):
    """Open the system ClickPoints database at *path* and append the
    timestamps of every image that carries a non-empty mask to
    self.System_Masks (as strings)."""
    self.system_db = DataFileExtended(path)
    # Single SQL join is much faster than iterating getMasks() in Python.
    query = "select image.timestamp from image inner join mask on mask.image_id=image.id where mask.data is not null"
    rows = self.system_db.db.execute_sql(query).fetchall()
    # Each row is a 1-tuple containing the timestamp.
    self.System_Masks.extend(str(row[0]) for row in rows)
Esempio n. 2
0
 def CreateDB(self, Folder):
     """Create a ClickPoints database "<parent>/<name>.cdb" for *Folder*,
     adding one image entry per matched file.

     Layer, timestamp and repetition index are all parsed from the file
     name.  NOTE(review): Track() below calls CreateDB with a second
     argument — confirm which signature is the current one.
     """
     Files = self.MatchedFiles[Folder]
     # Database is written next to Folder, named after the folder.
     db_path = os.path.sep.join(Folder.split(os.path.sep)[:-1] + [""])
     db_name = self.name_from_path(Folder)
     db = DataFileExtended(db_path + db_name + ".cdb", "w")
     path = db.setPath(Folder)
     idx_dict = {}  # built below but no longer read (see sort_index line)
     for file in Files:
         # self.Bar2.setValue(self.Bar2.value()+1)
         self.Bar2.increase()
         # Keep the GUI responsive during the long import loop.
         # QtGui.QApplication.processEvents()
         QtGui.QGuiApplication.processEvents()
         # First layer_dict key contained in the file name selects the layer.
         layer = self.layer_dict[[
             k for k in self.layer_dict if file.count(k)
         ][0]]
         # File names start with a "YYYYMMDD-HHMMSS" timestamp.
         time = datetime.datetime.strptime(
             file.split("_")[0], "%Y%m%d-%H%M%S")
         # The "repN" token in the file name gives the repetition index.
         idx = int([k[3:] for k in file.split("_") if k.count("rep")][0])
         if len(idx_dict) < 1:
             idx_dict.update({idx: 0})
         elif idx not in idx_dict:
             idx_dict.update({idx: max(idx_dict.values()) + 1})
         image = db.setImage(filename=file,
                             path=path,
                             layer=layer,
                             timestamp=time)  # , frame=int(idx))
         image.sort_index = idx  #idx_dict[idx]
         image.save()
Esempio n. 3
0
 def Track(self, Folder):
     """Run tracking (TCell_Analysis.Track) on the database belonging to
     *Folder*, creating the database first if it does not exist yet."""
     db_name = self.name_from_path(Folder)
     db_path = os.path.sep.join(Folder.split(os.path.sep)[:-1] + [""])
     if not os.path.exists(db_path + db_name + ".cdb"):
         # NOTE(review): the CreateDB method shown above accepts only
         # (self, Folder); this two-argument call would raise TypeError —
         # confirm the intended signature.
         self.CreateDB(Folder, self.MatchedFiles[Folder])
     db = DataFileExtended(db_path + db_name + ".cdb")
     # Debug output: sort indices of all images in the database.
     print([i.sort_index for i in db.getImageIterator()])
     TCell_Analysis.Track(db, progress_bar=self.Bar3)
    def save_marker_to_System_db(self, markers, path, type):
        """Write *markers* into the system ClickPoints database at *path*.

        Parameters
        ----------
        markers : dict
            Maps a frame number to an array whose transpose holds the x
            coordinates in row 0 and the y coordinates in row 1.
        path : str
            Path of the ClickPoints database file.
        type
            Marker type (or its name) under which the markers are stored.

        Existing markers of *type* are deleted first; everything runs in a
        single transaction so the database is never left half-updated.
        """
        # Removed the dead `if function is None: function = lambda x: x`
        # guard: `function` was never defined (not a parameter), so the
        # check raised NameError before any work could happen, and the
        # resulting lambda was never used.
        self.system_db = DataFileExtended(path)

        with self.system_db.db.atomic():
            self.system_db.deleteMarkers(type=type)
            for t in markers:
                self.system_db.setMarkers(image=self.system_db.getImage(frame=t),
                                          x=markers[t].T[0],
                                          y=markers[t].T[1],
                                          type=type)
Esempio n. 5
0
def load_tracks(path, type):
    """Load all tracks of marker *type* from the ClickPoints database *path*.

    Returns a dict mapping track id -> Filter(VariableSpeed(dim=3)) that has
    been fed one 3D Measurement per marker.  When markers carry an attached
    measurement its (x, y, z) is used; otherwise the raw marker (x, y) with
    z = 0.  An empty dict is returned when the database contains no tracks
    of *type* (the original raised IndexError in that case).
    """
    database = DataFileExtended(path)
    tracks = database.getTracks(type=type)
    Tracks = {}
    if not tracks:
        # Robustness: nothing to load.
        return Tracks
    # As in the original, decide once — from the first marker of the first
    # track — whether measurements are attached.
    has_measurements = tracks[0].markers[0].measurement is not None
    for track in tracks:
        Tracks[track.id] = Filter(VariableSpeed(dim=3))
        for m in track.markers:
            if has_measurements:
                meas = Measurement(1., [m.measurement[0].x,
                                        m.measurement[0].y,
                                        m.measurement[0].z])
            else:
                meas = Measurement(1., [m.x, m.y, 0])
            Tracks[track.id].update(z=meas, i=m.image.sort_index)
    return Tracks
Esempio n. 6
0
def CreateDB(path, files, progress_bar=None):
    """Create a ClickPoints database "<parent>/<name>.cdb" for the folder
    *path*, adding one image per entry of *files*.

    Parameters
    ----------
    path : str
        Measurement folder; the database is created next to it and named
        after it (via name_from_path).
    files : iterable of str
        File names to import.  Layer, timestamp ("YYYYMMDD-HHMMSS" prefix)
        and repetition index ("repN" token) are parsed from each name.
    progress_bar : optional
        Object with an increase() method, bumped once per file.

    Returns
    -------
    str
        Path of the created .cdb file.
    """
    Folder = os.path.normpath(path)
    Files = [os.path.normpath(f) for f in files]
    db_path = os.path.sep.join(Folder.split(os.path.sep)[:-1] + [""])

    db_name = name_from_path(Folder)
    db = DataFileExtended(db_path + db_name + ".cdb", "w")
    path = db.setPath(Folder)
    # Removed the dead idx_dict bookkeeping: it was built on every
    # iteration but never read (sort_index is taken directly from idx).
    for file in Files:
        if progress_bar is not None:
            progress_bar.increase()
        # First LAYER_DICT key contained in the file name selects the layer.
        layer = LAYER_DICT[[k for k in LAYER_DICT if file.count(k)][0]]
        # File names start with a "YYYYMMDD-HHMMSS" timestamp.
        time = datetime.datetime.strptime(file.split("_")[0], "%Y%m%d-%H%M%S")
        # The "repN" token carries the repetition index used as sort_index.
        idx = int([k[3:] for k in file.split("_") if k.count("rep")][0])
        image = db.setImage(filename=file, path=path, layer=layer, timestamp=time)
        image.sort_index = idx
        image.save()
    return db_path + db_name + ".cdb"
Esempio n. 7
0
def AnalyzeDB(db_str):
    """Run the full motility analysis on the tracked database *db_str*.

    Loads tracks, removes tracks lying too high/low in z, corrects for
    stage drift, measures velocities/directionalities and appends one
    summary row to "<db_str minus .cdb>_analyzed.txt".
    """
    db = DataFileExtended(db_str)
    time_step = 110  # seconds between frames
    v_fac = 0.645 / (time_step / 60.)  # pixel size over minutes per frame
    perc = 30  # percentile used to pick deletable tracks from z positions
    step = 20
    type = 'PT_Track_Marker'
    Frame_list = []
    for f in db.getImageIterator():
        Frame_list.append(f.sort_index)
    Frames = np.max(Frame_list)

    timer()
    Tracks = TCell_Analysis.load_tracks(db_str, type)
    timer("Loading Tracks")

    Z_posis = TCell_Analysis.getZpos(Tracks, v_fac)
    timer("Fetching Z")

    Z_posis = np.asarray(Z_posis)
    tracks_to_delete = TCell_Analysis.get_Tracks_to_delete(Z_posis, perc)
    timer("Getting Tracks to delete")

    ###
    list2 = TCell_Analysis.create_list2(db)  # Create List for true dist
    timer("Creating List2")

    drift, drift_list, missing_frame = TCell_Analysis.Drift(
        Frames, list2, 5)  # Create List with offsets
    timer("Getting Drift")

    list2 = TCell_Analysis.list2_with_drift(
        db, drift, tracks_to_delete,
        del_track=True)  # Create list for true dist with drift_cor
    timer("List2 with drift")

    # Renamed from `list` (shadowed the builtin); matches the `list1`
    # naming used by the other AnalyzeDB variant in this file.
    list1 = TCell_Analysis.create_list(
        Frames, db, drift=drift, Drift=True,
        Missing=missing_frame)  # Create List for analysis
    timer("Create List")
    ### For Deleting Tracks above and below
    # Shallow copy: list_copy shares the per-frame dicts with list1.
    list_copy = list1[:]
    for l, m in enumerate(list1):
        # list() copy of the keys: deleting from the dict while iterating
        # a live keys() view raises RuntimeError on Python 3.
        keys = list(m.keys())
        for k, j in enumerate(keys):
            if j in tracks_to_delete:
                del list_copy[l][j]
    timer("Stuff")

    ###
    print("bla")
    directions, velocities, dirt, alternative_vel, vel_mean, dir_mean, alt_vel_mean = TCell_Analysis.measure(
        step, time_step, list1, Frames)  # Calculate directions and velocities
    timer("Measure")

    motile_percentage, mean_v, mean_dire, number, len_count, mo_p_al, me_v_al, me_d_al = TCell_Analysis.values(
        directions,
        velocities,
        db,
        dirt,
        alternative_vel,
        tracks_to_delete,
        del_Tracks=True)
    timer("Values")
    motile_per_true_dist, real_dirt = TCell_Analysis.motiletruedist(list2)
    timer("Motile True Dist")

    if not os.path.exists(db_str[:-4] + "_analyzed.txt"):
        with open(db_str[:-4] + "_analyzed.txt", "w") as f:
            f.write(
                'Day       \t\t\tData                              \t\t\tMotile % true dist\t\t\tMotile in %\t\t\tMean velocity\t\t\tMean Directionality\t\t\tMean vel dt1\t\t\t#Tracks\t\t\t#Evaluated Tracks\t\t\t#Dirt\n'
            )

    # "%Y-%m-%d": lower-case %m is the month; the original "%M" inserted
    # the minute field into the date.
    Day = getDateFromPath(db_str).strftime("%Y-%m-%d")
    if db_str.count('TCell') or db_str.count('T-Cell'):
        TCell_Analysis.Colorplot(
            directions,
            velocities,
            db_str,
            path=os.path.sep.join(db_str.split(os.path.sep)[:-1]),
            Save=True)  # Save the velocity vs directionality picture
        # Text append mode "a": the file is written with str, so binary
        # "ab" raises TypeError on Python 3 (the later variant of this
        # function in the file already uses "a").
        with open(db_str[:-4] + "_analyzed.txt", "a") as f:
            f.write(
                '%s\t\t\t%34s\t\t\t%18f\t\t\t%11f\t\t\t%13f\t\t\t%19f\t\t\t%12f\t\t\t%7d\t\t\t%17d\t\t\t%5d\n'
                % (Day, db_str, motile_per_true_dist, motile_percentage,
                   mean_v, mean_dire, me_v_al, number, len_count, real_dirt))
    elif db_str.count('NKCell'):
        TCell_Analysis.Colorplot(directions,
                                 velocities,
                                 db_str,
                                 path=os.path.sep.join(
                                     db_str.split(os.path.sep)[:-1]),
                                 Save=True)
        with open(db_str[:-4] + "_analyzed.txt", 'a') as f:
            f.write(
                '%s\t\t\t%34s\t\t\t%18f\t\t\t%11f\t\t\t%13f\t\t\t%19f\t\t\t%12f\t\t\t%7d\t\t\t%17d\t\t\t%5d\n'
                % (Day, db_str, motile_per_true_dist, motile_percentage,
                   mean_v, mean_dire, me_v_al, number, len_count, real_dirt))
    db.db.close()

    timer("Write")
Esempio n. 8
0
        cov=Q)  # Initialize Distributions for Filter
    Meas_Dist = ss.multivariate_normal(
        cov=R)  # Initialize Distributions for Filter
    # Initialize Filter/Tracker
    MultiKal = MultiFilter(KalmanFilter,
                           model,
                           np.diag(Q),
                           np.diag(R),
                           meas_dist=Meas_Dist,
                           state_dist=State_Dist)
    MultiKal.LogProbabilityThreshold = log_prob_threshold

    # Extended Clickpoints Database for usage with pengutack
    from PenguTrack.DataFileExtended import DataFileExtended
    # Open ClickPoints Database
    db = DataFileExtended("./ExampleData/sim_data.cdb", "w")
    # Define ClickPoints Marker
    detection_marker_type = db.setMarkerType(name="Detection_Marker",
                                             color="#FF0000",
                                             style='{"scale":1.2}')
    db.deleteMarkers(type=detection_marker_type)
    track_marker_type = db.setMarkerType(name="Track_Marker",
                                         color="#00FF00",
                                         mode=db.TYPE_Track)
    db.deleteMarkers(type=track_marker_type)
    prediction_marker_type = db.setMarkerType(name="Prediction_Marker",
                                              color="#0000FF")
    db.deleteMarkers(type=prediction_marker_type)
    # Delete Old Tracks
    db.deleteTracks(type=track_marker_type)
Esempio n. 9
0
# from PenguTrack.Detectors import SimpleAreaDetector as AreaDetector

from skimage.morphology import binary_closing, binary_dilation, binary_opening, binary_erosion
from skimage.morphology import disk
# Structuring element for the morphology operations imported above.
SELEM = disk(2,dtype=bool)

import scipy.stats as ss

# Load Database
# file_path = "/home/birdflight/Desktop/PT_Test.cdb"
file_path = "/mnt/mmap/Starter_Full.cdb"
# file_path = "/mnt/mmap/PT_Test3.cdb"
# file_path = "/mnt/mmap/PT_Test4.cdb"

# `global` is a no-op at module level; kept to document that db is meant
# to be shared globally.
global db
db = DataFileExtended(file_path,"w")

# Source database whose images are iterated and re-tracked into `db`.
db_start = DataFileExtended("/home/birdflight/Desktop/Starter.cdb")
# images = db_start.getImageIterator(start_frame=2490-30, end_frame=2600)
# images = db_start.getImageIterator(start_frame=1936-210, end_frame=2600)
images = db_start.getImageIterator(start_frame=700)
# images = db_start.getImageIterator(start_frame=1936-20-90, end_frame=2600)
# images = db_start.getImageIterator(start_frame=1500, end_frame=2600)

# images = db_start.getImageIterator()
def getImage():
    im = images.next()
    fname = im.filename
    from datetime import datetime
    d = datetime.strptime(fname[0:15], '%Y%m%d-%H%M%S')
    time_unix = np.uint32(time.mktime(d.timetuple()))
Esempio n. 10
0
    # Initialize Filter/Tracker
    # MultiKal = MultiFilter(KalmanFilter, model, np.diag(Q),
    #                        np.diag(R), meas_dist=Meas_Dist, state_dist=State_Dist)
    from PenguTrack.Filters import HungarianTracker
    MultiKal = HungarianTracker(KalmanFilter,
                                model,
                                np.diag(Q),
                                np.diag(R),
                                meas_dist=Meas_Dist,
                                state_dist=State_Dist)
    MultiKal.LogProbabilityThreshold = log_prob_threshold

    # Extended Clickpoints Database for usage with pengutack
    from PenguTrack.DataFileExtended import DataFileExtended
    # Open ClickPoints Database
    db = DataFileExtended("./ExampleData/cell_data.cdb")
    # Define ClickPoints Marker
    detection_marker_type = db.setMarkerType(name="Detection_Marker",
                                             color="#FF0000",
                                             style='{"scale":1.2}')
    db.deleteMarkers(type=detection_marker_type)
    track_marker_type = db.setMarkerType(name="Track_Marker",
                                         color="#00FF00",
                                         mode=db.TYPE_Track)
    db.deleteMarkers(type=track_marker_type)
    prediction_marker_type = db.setMarkerType(name="Prediction_Marker",
                                              color="#0000FF")
    db.deleteMarkers(type=prediction_marker_type)
    # Delete Old Tracks
    db.deleteTracks(type=track_marker_type)
 def load_GT_marker_from_clickpoints(self, path, type):
     """Load ground-truth markers of *type* from the ClickPoints database
     at *path* and store their (x, y) positions in GT_Markers, grouped by
     image sort_index."""
     self.gt_db = DataFileExtended(path)
     data = np.asarray([[m.image.sort_index, m.x, m.y]
                        for m in self.gt_db.getMarkers(type=type)])
     frames = set(data.T[0])
     # Rows 1: of the transposed selection hold x and y for that frame.
     self.GT_Markers.update({f: data[data.T[0] == f].T[1:] for f in frames})
Esempio n. 12
0
from skimage.morphology import binary_closing, binary_dilation, binary_opening, binary_erosion
from skimage.morphology import disk
# Structuring element for the morphology operations imported above.
SELEM = disk(2, dtype=bool)

import scipy.stats as ss

# Load Database
# file_path = "/home/birdflight/Desktop/PT_Test.cdb"
file_path = "/home/user/Desktop/PT_Test_full_n3_r7_A20.cdb"
c = 20
# file_path = "/mnt/mmap/PT_Test3.cdb"
# file_path = "/mnt/mmap/PT_Test4.cdb"

# `global` is a no-op at module level; kept to document that db is meant
# to be shared globally.
global db
db = DataFileExtended(file_path, "w")

# NOTE(review): input_file is not defined in this snippet — presumably set
# earlier (e.g. from sys.argv); confirm before running standalone.
db_start = DataFileExtended(input_file)
# images = db_start.getImageIterator(start_frame=2490-30, end_frame=2600)
# images = db_start.getImageIterator(start_frame=1936-210, end_frame=2600)
# images = db_start.getImageIterator(end_frame=52)
# images = db_start.getImageIterator(start_frame=1936-20-90, end_frame=2600)
# images = db_start.getImageIterator(start_frame=1500, end_frame=2600)

images = db_start.getImageIterator()


def getImage():
    try:
        im = images.next()
    except StopIteration:
Esempio n. 13
0
                                 Save=True)  # Save the velocity vs directionality picture
        with open(db_str[:-4] + "_analyzed.txt", "a") as f:
            f.write(
                '%s\t\t\t%34s\t\t\t%18f\t\t\t%11f\t\t\t%13f\t\t\t%19f\t\t\t%12f\t\t\t%7d\t\t\t%17d\t\t\t%5d\n' % (
                    Day, db_str, motile_per_true_dist, motile_percentage, mean_v, mean_dire, me_v_al, number,
                    len_count, real_dirt))
    elif db_str.count('NKCell'):
        Analysis_Tools.Colorplot(directions, velocities, db_str.split(os.path.sep)[-1][::-4],
                                 path=os.path.sep.join(db_str.split(os.path.sep)[:-1]),
                                 Save=True)
        with open(db_str[:-4] + "_analyzed.txt", 'a') as f:
            f.write('%s\t\t\t%34s\t\t\t%18f\t\t\t%11f\t\t\t%13f\t\t\t%19f\t\t\t%12f\t\t\t%7d\t\t\t%17d\t\t\t%5d\n' % (
                Day, db_str, motile_per_true_dist, motile_percentage, mean_v, mean_dire, me_v_al, number,
                len_count, real_dirt))
    db.db.close()

if __name__ == "__main__":
    # AnalyzeDB("/home/alex/2017-03-10_Tzellen_microwells_bestdata/1T-Cell-Motility_2017-10-17_1_2Gel_24hnachMACS_24himGel_Kontrolle_RB_1.cdb")
    # AnalyzeDB(r"Z:\T-Cell-Motility\2017-10-17\1_2Gel\24hnachMACS\1himGel\Kontrolle\RB\4T-Cell-Motility_2017-10-17_1_2Gel_24hnachMACS_1himGel_Kontrolle_RB_4_stitched2.cdb")
    # Find all measurement folders and build one database per folder.
    Matches, Matched_Files = Crawl_Folder("/mnt/cmark2/T-Cell-Motility/2017-10-17/1_2Gel/24hnachMACS/24himGel/Kontrolle/RB/")
    Database_paths =[]
    for m in Matches:
        Database_paths.append(CreateDB(m, Matched_Files[m]))
    # Track, stitch, then analyze every created database.
    for db_path in Database_paths:
        db = DataFileExtended(db_path)
        # NOTE(review): Track is called with an open database here although
        # other variants in this file take a folder path — confirm signature.
        Track(db, None)
    for db_path in Database_paths:
        Stitch(db_path)
    for db_path in Database_paths:
        AnalyzeDB(db_path)
Esempio n. 14
0
if __name__ == '__main__':
    import my_plot
    import numpy as np
    import clickpoints
    import matplotlib.pyplot as plt
    from PenguTrack.DataFileExtended import DataFileExtended

    # Load Data from Databases
    db = DataFileExtended(
        "/home/birdflight/Desktop/PT_Test_full_n3_r7_A20_filtered.cdb")
    db2 = DataFileExtended("/home/birdflight/Desktop/252_GT_Detections.cdb")
    LogType = db.getMarkerType(name="PT_Detection_Marker")
    ImgType = db.getMarkerType(name="PT_Track_Marker")
    # Positions_Log = np.asarray([[m.x, m.y, m.image.sort_index] for m in db.getMarkers(type=LogType) if not m.text.count("inf")])
    Positions_Img = np.asarray([[m.x, m.y, m.image.sort_index]
                                for m in db.getMarkers(type=ImgType)
                                if m.track.markers.count() > 3])

    # Do Correction for Position Data
    from CameraTransform import CameraTransform
    CT = CameraTransform(14, [17, 9], [4608, 2592],
                         observer_height=31.,
                         angel_to_horizon=(np.pi / 2. - 0.24) * 180 / np.pi)
    orth_x, orth_y, orth_z = CT.transCamToWorld(Positions_Img.T[:2], Z=0.525)

    # Calculate Histogramms
    cx = cy = 2
    times = np.asarray(sorted([i.timestamp for i in db.getImages()]))
    scale = 1. / (cx * cy) / ((times[-1] - times[0]).seconds / 3600.)
    hist, binx, biny = np.histogram2d(orth_x,
                                      orth_y,
import numpy as np
import cv2
from PenguTrack.Detectors import FlowDetector, EmperorDetector, rgb2gray
from PenguTrack.Filters import HungarianTracker, KalmanFilter
from PenguTrack.Models import BallisticWSpeed
from PenguTrack.DataFileExtended import DataFileExtended
import matplotlib.pyplot as plt
import scipy.stats as ss
db = DataFileExtended(
    r"D:\User\Alex\Documents\Promotion\RotatingHuddles\DataBases\data1980.cdb")
# NOTE(review): "deletetOld" looks misspelled but may match the actual
# DataFileExtended API — confirm before renaming.
db.deletetOld()

# FD = FlowDetector()

# Tracker tuning parameters.
object_size = 3  # Object diameter (smallest)
q = 1.  # Variability of object speed relative to object size
r = 1.  # Error of object detection relative to object size
log_prob_threshold = -20.  # Threshold for track stopping

model = BallisticWSpeed(dim=2)

# Set up Kalman filter
X = np.zeros(model.State_dim).T  # Initial Value for Position
# Q = np.diag([q * object_size * np.ones(model.Evolution_dim)])  # Prediction uncertainty
# Position components weighted 0.1, speed components 1.0.
Q = np.diag([q * object_size * np.array([0.1, 1.0, 0.1, 1.0])
             ])  # Prediction uncertainty

# R = np.diag([r * object_size * np.ones(model.Meas_dim)])  # Measurement uncertainty
R = np.diag([r * object_size * np.array([0.1, 1.0, 0.1, 1.0])
             ])  # Measurement uncertainty
Esempio n. 16
0
from PenguTrack.DataFileExtended import DataFileExtended
import numpy as np

db = DataFileExtended(
    "/home/alex/Masterarbeit/Data/Cells/DataBases/PT_Cell_T850_A75_inf_3d_backup.cdb"
)
tracks = db.getTracks(type="PT_Track_Marker")

# db = DataFileExtended("/home/alex/Desktop/PT_Cell_GT_Track.cdb")
# tracks = db.getTracks(type="GroundTruth")
V = []  # currently never appended to in this snippet
vals = {}  # currently never filled in this snippet
for track in tracks:
    # Velocities need a minimum number of markers to be meaningful.
    if track.markers.count() < 4:
        continue
    # Positions converted to um (0.646/0.645 um per pixel), plus timestamps.
    x, y, z, t = np.array([[
        m.measurement[0].x * 0.646, m.measurement[0].y * 0.645,
        m.measurement[0].z * 0.645, m.image.timestamp
    ] for m in track.markers]).T
    # x,y,z,t = np.array([[m.x*0.646, m.y*0.645, 0, m.image.timestamp] for m in track.markers]).T
    n = 10.  # smoothing window; only used by the commented-out convolution
    # z = np.convolve(z, np.ones(n)/n, mode="same") if len(z)>n else z
    # Per-step time differences in seconds.
    t = (np.array([tt.total_seconds() for tt in t[1:] - t[:-1]], dtype=float))
    vx = ((x[1:] - x[:-1]) / t).astype(float)
    vy = ((y[1:] - y[:-1]) / t).astype(float)
    vz = ((z[1:] - z[:-1]) / t).astype(float)
    # vz = np.convolve(vz, np.ones(10)/10., mode="same") if len(vz)>10 else vz

    # v = (vx**2+vy**2+vz**2)**0.5
    # In-plane speed only; vz is reported separately below.
    v = (vx**2 + vy**2)**0.5
    print(np.nanmean(v), np.mean(np.abs(vz)), np.mean(vx), np.mean(vy))
def AnalyzeDB(db_str):
    """Variant of AnalyzeDB that loads tracks directly from the database
    (db.PT_tracks_from_db) instead of via TCell_Analysis.load_tracks."""
    db = DataFileExtended(db_str)
    time_step = 110  # seconds between frames
    v_fac = 0.645 / (time_step / 60.)  # pixel size over minutes per frame
    perc = 30  # percentile used to pick deletable tracks from z positions
    step = 20
    type = 'PT_Track_Marker'
    Frame_list = [f.sort_index for f in db.getImages()]
    Frames = np.amax(Frame_list)

    timer()
    Tracks = db.PT_tracks_from_db(type)
    timer("Normal Tracks")
    # Second pass without measurements: cheaper, used for the xy lists.
    Tracks_woMeasurements = db.PT_tracks_from_db(type, get_measurements=False)
    timer("Tracks WO")

    Z_posis = Z_from_PT_Track(Tracks, v_fac)
    timer("Z_posis")

    tracks_to_delete = deletable_tracks(Z_posis, perc)
    timer("Trackstodelete")

    list2 = getXY(Tracks_woMeasurements)  # Create List for true dist
    timer("GETXY")

    drift, drift_list, missing_frame = TCell_Analysis.Drift(
        Frames, list2, 5)  # Create List with offsets
    timer("Drift")
    # NOTE(review): this list2 is overwritten two statements below; the
    # list2_with_drift result is never used — confirm intent.
    list2 = TCell_Analysis.list2_with_drift(
        db, drift, tracks_to_delete,
        del_track=True)  # Create list for true dist with drift_cor

    list2 = getXY_drift_corrected(Tracks_woMeasurements,
                                  np.vstack([[0, 0], drift]))
    timer("CorrectXY")

    list1 = analysis_list_from_tracks(
        Frames,
        Tracks_woMeasurements,
        drift=drift,
        Drift=True,
        Missing=missing_frame)  # Create List for analysis

    ### For Deleting Tracks above and below
    # Shallow copy: list_copy shares the per-frame dicts with list1.
    list_copy = list1[:]
    for l, m in enumerate(list1):
        # NOTE(review): on Python 3, deleting keys while iterating the live
        # keys() view raises RuntimeError — wrap in list() when porting.
        keys = m.keys()
        for k, j in enumerate(keys):
            if j in tracks_to_delete:
                del list_copy[l][j]

    directions, velocities, dirt, alternative_vel, vel_mean, dir_mean, alt_vel_mean = TCell_Analysis.measure(
        step, time_step, list1, Frames)  # Calculate directions and velocities
    timer("Measure")

    motile_percentage, mean_v, mean_dire, number, len_count, mo_p_al, me_v_al, me_d_al = TCell_Analysis.values(
        directions,
        velocities,
        db,
        dirt,
        alternative_vel,
        tracks_to_delete,
        del_Tracks=True)
    timer("Values")
    motile_per_true_dist, real_dirt = TCell_Analysis.motiletruedist(list2)
    timer("Motile True Dist")
 def load_neg_System_marker_from_clickpoints(self, path, type):
     """Load negative system markers of *type* from the ClickPoints
     database at *path*, map them onto ground-truth image sort indices and
     store their (x, y) positions in neg_System_Markers, grouped per frame.
     Markers whose image is unknown to the ground-truth database are
     skipped."""
     self.system_db = DataFileExtended(path)
     rows = []
     for m in self.system_db.getMarkers(type=type):
         gt_image = self.gt_db.getImage(filename=m.image.filename)
         if gt_image is not None:
             rows.append([gt_image.sort_index, m.x, m.y])
     data = np.asarray(rows)
     self.neg_System_Markers.update(
         {f: data[data.T[0] == f].T[1:] for f in set(data.T[0])})
Esempio n. 19
0
import numpy as np
import seaborn as sn
import matplotlib.pyplot as plt
from PenguTrack.DataFileExtended import DataFileExtended

file_path = "/home/user/Desktop/Birdflight.cdb"
db = DataFileExtended(file_path)

# Marker types written by the tracker plus the ground-truth annotations.
PT_Track_Type = db.getMarkerType(name="PT_Track_Marker")
PT_Detection_Type = db.getMarkerType(name="PT_Detection_Marker")
PT_Prediction_Type = db.getMarkerType(name="PT_Prediction_Marker")
GT_Type = db.getMarkerType(name="GT")

Tracks = db.getTracks(type=PT_Track_Type)

# Accumulators for per-track turning angles and step lengths.
mean_phi = []
phies = []
std_phi = []
mean_len = []
lens = []

for track in Tracks:
    if track.markers.count() < 3:
        continue
    x = np.asarray([[m.x, m.y] for m in track.markers])
    del_x = x[1:] - x[:-1]
    del_x_norm = np.linalg.norm(del_x, axis=1)
    try:
        # phi = np.tensordot(del_x[1:],del_x[:-1].T, axes=1)/del_x_norm[1:]/del_x_norm[:-1]
        phi = np.diag(np.tensordot(del_x[1:], del_x[:-1].T,
                                   axes=1)) / del_x_norm[1:] / del_x_norm[:-1]
Esempio n. 20
0
 def load_System_marker_from_clickpoints(self, path, type):
     """Load system markers of *type* from the ClickPoints database at
     *path* for images present in BOTH the ground-truth and the system
     database, and store their (x, y) positions in System_Markers grouped
     by ground-truth image sort_index."""
     self.system_db = DataFileExtended(path)
     gt_names = {img.filename for img in self.gt_db.getImages()}
     sys_names = [img.filename for img in self.system_db.getImages()]
     shared = gt_names.intersection(sys_names)
     data = np.asarray(
         [[self.gt_db.getImage(filename=m.image.filename).sort_index, m.x, m.y]
          for m in self.system_db.getMarkers(type=type)
          if m.image.filename in shared])
     self.System_Markers.update(
         {f: data[data.T[0] == f].T[1:] for f in set(data.T[0])})