Example #1

if usePerception:
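    # initialize the perception UI pieces: segmentation panel, camera view, colorize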
    segmentationpanel.init()
    cameraview.init()
    colorize.init()

    cameraview.cameraView.initImageRotations(robotStateModel)
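    # route click-rays from the camera view into the segmentation routines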
    cameraview.cameraView.rayCallback = segmentation.extractPointsAlongClickRay

    if useMultisense:
        multisensepanel.init(perception.multisenseDriver)
    else:
        app.removeToolbarMacro('ActionMultisensePanel')

    sensordatarequestpanel.init()

    # for kintinuous, use 'CAMERA_FUSED', 'CAMERA_TSDF'
    disparityPointCloud = segmentation.DisparityPointCloudItem('stereo point cloud', 'CAMERA', 'CAMERA_LEFT', cameraview.imageManager)
    disparityPointCloud.addToView(view)
    om.addToObjectModel(disparityPointCloud, parentObj=om.findObjectByName('sensors'))

    def createPointerTracker():
        return trackers.PointerTracker(robotStateModel, disparityPointCloud)


if useOpenniDepthImage:
    openniDepthPointCloud = segmentation.DisparityPointCloudItem('openni point cloud', 'OPENNI_FRAME', 'OPENNI_FRAME_LEFT', cameraview.imageManager)
    openniDepthPointCloud.addToView(view)
    om.addToObjectModel(openniDepthPointCloud, parentObj=om.findObjectByName('sensors'))
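Each cloud item is registered under the 'sensors' folder of the object model, so it can be looked up and configured later. A minimal usage sketch, assuming director's usual object-model API (om.findObjectByName returning None on a miss, setProperty with the standard 'Visible' property) plus the createPointerTracker factory defined above:

cloud = om.findObjectByName('stereo point cloud')
if cloud is not None:
    cloud.setProperty('Visible', True)  # show the disparity cloud in the 3D view

tracker = createPointerTracker()  # PointerTracker bound to the robot state and cloud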
Example #2
    app.removeToolbarMacro('ActionAtlasDriverPanel')

if usePerception:
    segmentationpanel.init()
    cameraview.init()
    colorize.init()

    cameraview.cameraView.initImageRotations(robotStateModel)
    cameraview.cameraView.rayCallback = segmentation.extractPointsAlongClickRay

    if useMultisense:
        multisensepanel.init(perception.multisenseDriver)
    else:
        app.removeToolbarMacro('ActionMultisensePanel')

    sensordatarequestpanel.init()

    # for kintinuous, use 'CAMERA_FUSED', 'CAMERA_TSDF'
    disparityPointCloud = segmentation.DisparityPointCloudItem(
        'stereo point cloud', 'CAMERA', 'CAMERA_LEFT', cameraview.imageManager)
    disparityPointCloud.addToView(view)
    om.addToObjectModel(disparityPointCloud,
                        parentObj=om.findObjectByName('sensors'))

    def createPointerTracker():
        return trackers.PointerTracker(robotStateModel, disparityPointCloud)


if useOpenniDepthImage:
    openniDepthPointCloud = segmentation.DisparityPointCloudItem(
        'openni point cloud', 'OPENNI_FRAME', 'OPENNI_FRAME_LEFT',
        cameraview.imageManager)
    openniDepthPointCloud.addToView(view)
    om.addToObjectModel(openniDepthPointCloud,
                        parentObj=om.findObjectByName('sensors'))
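Both snippets carry the same hint about Kintinuous channels. A minimal sketch of that variant, assuming the constructor takes the same positional arguments as above; the item name 'fused point cloud' is invented here for illustration:

# Kintinuous fused channels, per the comment in the snippets above
fusedPointCloud = segmentation.DisparityPointCloudItem(
    'fused point cloud', 'CAMERA_FUSED', 'CAMERA_TSDF',
    cameraview.imageManager)
fusedPointCloud.addToView(view)
om.addToObjectModel(fusedPointCloud, parentObj=om.findObjectByName('sensors'))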