Example #1
import sys

import cv2
from PyQt5 import QtWidgets as qw

import argos.utility as util
from argos.frameview import FrameView


def test_display():
    util.init()
    app = qw.QApplication(sys.argv)
    view = FrameView()
    image = cv2.imread(
        'C:/Users/raysu/analysis/animal_tracking/bugtracking/training_images/'
        'prefix_1500.png')
    view.setFrame(image, 0)
    win = qw.QMainWindow()
    toolbar = win.addToolBar('Zoom')
    # Zoom actions operate directly on the view
    zi = qw.QAction('Zoom in')
    zi.triggered.connect(view.zoomIn)
    zo = qw.QAction('Zoom out')
    zo.triggered.connect(view.zoomOut)
    # Arena and ROI selection modes are handled by the scene
    arena = qw.QAction('Select arena')
    arena.triggered.connect(view.scene().setArenaMode)
    arena_reset = qw.QAction('Reset arena')
    arena_reset.triggered.connect(view.scene().resetArena)
    roi = qw.QAction('Select rectangular ROIs')
    roi.triggered.connect(view.scene().setRoiRectMode)
    poly = qw.QAction('Select polygon ROIs')
    poly.triggered.connect(view.scene().setRoiPolygonMode)
    toolbar.addAction(zi)
    toolbar.addAction(zo)
    toolbar.addAction(arena)
    toolbar.addAction(roi)
    toolbar.addAction(poly)
    toolbar.addAction(arena_reset)
    win.setCentralWidget(view)
    win.show()
    sys.exit(app.exec_())
Example #2
import sys
import logging
import threading
import numpy as np
import os
import time
from datetime import timedelta

from PyQt5 import (QtWidgets as qw, QtCore as qc, QtGui as qg)

from argos import utility
from argos.frameview import FrameView
from argos.vreader import VideoReader
from argos import writer

settings = utility.init()


class VidInfo(qw.QMainWindow):
    """Window displaying metadata about the current video and output files."""

    def __init__(self):
        super(VidInfo, self).__init__()
        self.setWindowTitle('Video/Data Information')
        self.vidfile_label = qw.QLabel('Video file')
        self.vidfile = qw.QLabel('')
        self.frames_label = qw.QLabel('Number of frames')
        self.frames = qw.QLabel('')
        self.fps_label = qw.QLabel('Frames per second')
        self.fps = qw.QLabel('')
        self.outfile_label = qw.QLabel('Output files')
        self.outfile = qw.QLabel('')
        self.width_label = qw.QLabel('Frame width')
Example #3
import logging
from collections import OrderedDict
import time
import numpy as np
import cv2
from PyQt5 import (QtWidgets as qw, QtCore as qc, QtGui as qg)

import argos.constants as consts
from argos import utility as ut
from argos.frameview import FrameView
from argos.segment import (segment_by_dbscan, segment_by_contours,
                           segment_by_contour_bbox, segment_by_watershed,
                           extract_valid, get_bounding_poly)

settings = ut.init()

# Display names for the intermediate segmentation steps, mapped to their enum values
segstep_dict = OrderedDict([
    ('Final', consts.SegStep.final),
    ('Blurred', consts.SegStep.blur),
    ('Thresholded', consts.SegStep.threshold),
    ('Segmented', consts.SegStep.segmented),
    ('Filtered', consts.SegStep.filtered),
])

# Display names for the available segmentation methods, mapped to their enum values
segmethod_dict = OrderedDict([
    ('Threshold', consts.SegmentationMethod.threshold),
    ('Contour', consts.SegmentationMethod.contour),
    ('Watershed', consts.SegmentationMethod.watershed),
    ('DBSCAN', consts.SegmentationMethod.dbscan)
])
Example #4
import cv2
from typing import Dict, List
from matplotlib import cm
from PyQt5 import (
    QtCore as qc,
    QtGui as qg,
    QtWidgets as qw
)
import sip

import argos.utility as util
from argos.constants import DrawingGeom, ColorMode
from argos.utility import cv2qimage, make_color, get_cmap_color


settings = util.init()


class FrameScene(qw.QGraphicsScene):
    """Graphics scene holding the current video frame and the user-drawn
    arena and ROI polygons."""

    sigPolygons = qc.pyqtSignal(dict)
    sigPolygonsSet = qc.pyqtSignal()
    sigArena = qc.pyqtSignal(qg.QPolygonF)
    sigFontSizePixels = qc.pyqtSignal(int)
    sigMousePos = qc.pyqtSignal(qc.QPointF)

    def __init__(self, *args, **kwargs):
        super(FrameScene, self).__init__(*args, **kwargs)
        self.roi = None
        self.frameno = -1
        self.arena = None
        self.arenaPolygon = None
Example #5
.. _SORT: https://github.com/abewley/sort

.. _DeepSORT: https://github.com/nwojke/deep_sort

"""
import logging
import numpy as np
import cv2
from PyQt5 import (QtCore as qc)

import argos.constants
from argos import utility as au
from argos.utility import match_bboxes

settings = au.init()


class KalmanTracker(object):
    """This class tries to improve performance over SORT or DeepSORT by using
    opencv's builtin Kalman Filter. OpenCV being written in C/C++ it outperforms
    the Python code in DeepSORT or filterpy (used in SORT).

    In my test, the predict step in OpenCV takes

    2.78 µs ± 14.7 ns per loop (mean ± std. dev. of 7 runs, 100000 loops each)

    compared to DeepSORT taking

    45.7 µs ± 1.24 µs per loop (mean ± std. dev. of 7 runs, 10000 loops each)
    """