# Example #1 (source-listing score: 0)
def DetectPeople(cfgPath):
    """Launch the people-detection application described by ``cfgPath``.

    Builds the camera and GUI from the configuration file, wires the
    camera into the window, starts one worker thread for the camera and
    one for the GUI, then enters the Qt event loop.  Never returns:
    ``sys.exit`` terminates the process when the event loop exits.

    :param cfgPath: path to the configuration file understood by both
        ``Camera`` and ``GUI``.
    """
    cam = Camera(cfgPath)

    app = QtWidgets.QApplication([])
    window = GUI(cfgPath)
    window.setCamera(cam)
    window.show()

    # Threading camera (daemon: must not keep the process alive once the
    # Qt event loop below has exited).
    t_cam = ThreadCamera(cam)
    t_cam.setDaemon(True)
    t_cam.start()

    # Threading GUI
    t_gui = ThreadGUI(window)
    t_gui.setDaemon(True)
    t_gui.start()

    # Blocks until the window is closed; exit code is propagated.
    sys.exit(app.exec_())
    # NOTE(review): removed a Python-2 `print "hi"` statement (a syntax
    # error under Python 3) and a large unreachable tail after sys.exit()
    # that referenced names never defined here (t_network, tracker_prop,
    # logger_status, gui_cfg, ...) — it belonged to a different example.
# Example #3 (source-listing score: 0)
        print(exc)
        raise SystemExit(
            'Error: Cannot read/parse YML file. Check YAML syntax.')
    except:
        raise SystemExit(
            '\n\tUsage: python2 objectdetector.py objectdetector.yml\n')


# NOTE(review): this __main__ section looks like SEVERAL separate example
# scripts fused together by a copy/paste or extraction error; as written it
# is NOT syntactically valid Python (see the inline NOTE comments below).
# Code is left byte-identical pending reconstruction of the originals.
if __name__ == '__main__':

    # Read the config, open the configured video source, and look up the
    # detection-network class plus its properties.
    cfg = readConfig()
    cam = selectVideoSource(cfg)
    net_prop, DetectionNetwork = selectNetwork(cfg)

    # Threading the camera...
    t_cam = ThreadCamera(cam)
    t_cam.start()

    # Build the detection network on the camera feed and run it in its
    # own thread.
    network = DetectionNetwork(net_prop)
    network.setCamera(cam)
    t_network = ThreadNetwork(network)
    t_network.start()

    # Qt application and main window, wired to camera and network threads.
    app = QtWidgets.QApplication(sys.argv)
    window = GUI()
    window.setCamera(cam, t_cam)
    window.setNetwork(network, t_network)
    window.show()

    # Threading GUI
    t_gui = ThreadGUI(window)
    # NOTE(review): the indented block below does not belong here — it is
    # the middle of a device-type dispatch ('kobuki' vs 'ptz') from a
    # different example; its opening "if device_type..." branch is missing,
    # so the elif/else that follow have no matching if.
        # GUI version with depth image
        from GUI.gui import DepthGUI as GUIClass
        # Turtlebot motors
        from Motors.Kobuki.motors import Motors
        motors_proxy = jdrc.getMotorsClient('FollowPerson.Motors')
        # PT motors for EVI camera
    elif device_type.lower() == 'ptz':
        from GUI.gui import GUI as GUIClass
        from Motors.PTZ.motors import Motors
        motors_proxy = jdrc.getPTMotorsClient('FollowPerson.PTMotors')
    else:
        raise SystemExit(('%s not supported! Supported devices: Kobuki, PTZ') %
                         (device_type))

    cam = Camera(cam_proxy)
    t_cam = ThreadCamera(cam)
    t_cam.start()

    # Tracking network for the follow-person example, on its own thread.
    network = TrackingNetwork(net_prop)
    network.setCamera(cam)
    t_network = ThreadNetwork(network)
    t_network.start()

    # Reference image path for the siamese (re-identification) network.
    mom_path = cfg.getProperty('FollowPerson.Mom.ImagePath')

    siamese_network = SiameseNetwork(siamese_model, mom_path)

    app = QtWidgets.QApplication(sys.argv)
    window = GUIClass()

    # NOTE(review): truncated "if" — its body was lost and the lines that
    # follow are from yet another example (pose estimation with an
    # optional depth camera); the indentation does not match this if.
    if device_type.lower() == 'kobuki':
    data = readConfig()
    cam, cam_depth = selectVideoSource(data)

    # Use the 3-D visualiser and depth-aware GUI only when a depth camera
    # was configured; otherwise fall back to the plain GUI.
    viz3d = None
    if cam_depth:
        viz3d = init_viz()
        window = GUI3D(cam, cam_depth)
    else:
        window = GUI(cam)
    window.show()

    estimator = Estimator(cam, cam_depth, viz3d, window, data["Estimator"])

    # Threading camera (daemon threads so they don't block interpreter exit)
    t_cam = ThreadCamera(cam)
    t_cam.setDaemon(True)
    t_cam.start()
    t_cam_depth = ThreadCamera(cam_depth)
    t_cam_depth.setDaemon(True)
    t_cam_depth.start()

    # Threading estimator
    t_estimator = ThreadEstimator(estimator)
    t_estimator.setDaemon(True)
    t_estimator.start()

    # Threading GUI
    t_gui = ThreadGUI(window)
    t_gui.setDaemon(True)
    t_gui.start()