Example #1
        center = segmentation.computeCentroid(chull.polyData)
        chullPoints = vnp.getNumpyFromVtk(chull.polyData, 'Points')
        # Draw a short segment from the plane origin along the plane normal to
        # visualize the plane orientation.
        d.addLine(plane.GetOrigin(),
                  np.array(plane.GetOrigin()) +
                  0.005 * np.array(plane.GetNormal()),
                  radius=0.0001,
                  color=[0, 0, 0])
        #d.addArrow(plane.GetOrigin(), np.array(plane.GetOrigin()) + 0.01 * np.array(plane.GetNormal()), headRadius=0.001, tubeRadius=0.0002)
        #d.addSphere(chullPoints[0], radius=0.001, color=[1,0,0])
        #d.addSphere(chullPoints[1], radius=0.001, color=[0,1,0])

    vis.showPolyData(d.getPolyData(), 'plane normals', colorByName='RGB255')

    #saveConvexHulls(chulls, name)
    #vis.showPolyData(ioUtils.readPolyData(os.path.join(name, 'merged_planes.ply')), 'merged_planes')

applogic.resetCamera([1, 1, 0])
applogic.setBackgroundColor([1, 1, 1])
view.orientationMarkerWidget().Off()
app.gridObj.setProperty('Color', [0, 0, 0])
app.gridObj.setProperty('Surface Mode', 'Surface with edges')

orbit = cameracontrol.OrbitController(view)
screenGrabberPanel = screengrabberpanel.ScreenGrabberPanel(view)

if app.getTestingInteractiveEnabled():
    view.show()
    view.resize(1280, 1024)
    app.start()
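Example #1 begins inside a loop, so d, plane, and chull come from earlier in the original script. The lines below are a minimal sketch of that assumed context, using director's DebugData helper to accumulate the line segments later shown as 'plane normals'; the chulls list and the loop structure are assumptions for illustration, not part of the excerpt.

# Sketch of the context assumed by Example #1 (names are assumptions, not from
# the excerpt).
from director.debugVis import DebugData

chulls = []  # would be filled by an earlier plane-segmentation step:
             # (convex hull item, vtkPlane) pairs
d = DebugData()
for chull, plane in chulls:
    chull.setProperty('Surface Mode', 'Surface with edges')
    chull.actor.GetProperty().SetLineWidth(3)
    # ... per-plane drawing as in the excerpt above ...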
Example #2
    def createPointerTracker():
        return trackers.PointerTracker(robotStateModel, disparityPointCloud)


if useOpenniDepthImage:
    # Build a point cloud item from the OpenNI depth camera images and file it
    # under the 'sensors' folder in the object model.
    openniDepthPointCloud = segmentation.DisparityPointCloudItem(
        'openni point cloud', 'OPENNI_FRAME', 'OPENNI_FRAME_LEFT',
        cameraview.imageManager)
    openniDepthPointCloud.addToView(view)
    om.addToObjectModel(openniDepthPointCloud,
                        parentObj=om.findObjectByName('sensors'))

if useGrid:
    grid = vis.showGrid(view, color=[0, 0, 0], alpha=0.1)
    grid.setProperty('Surface Mode', 'Surface with edges')

app.setBackgroundColor([0.3, 0.3, 0.35], [0.95, 0.95, 1])

viewOptions = vis.ViewOptionsItem(view)
om.addToObjectModel(viewOptions, parentObj=om.findObjectByName('sensors'))

viewBackgroundLightHandler = viewcolors.ViewBackgroundLightHandler(
    viewOptions, grid,
    app.getToolsMenuActions()['ActionToggleBackgroundLight'])
if not useLightColorScheme:
    viewBackgroundLightHandler.action.trigger()

if useHands:
    handcontrolpanel.init(lHandDriver, rHandDriver, robotStateModel,
                          robotStateJointController, view)
else:
    app.removeToolbarMacro('ActionHandControlPanel')
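Example #2 likewise depends on objects created earlier in its startup script (view, robotStateModel, the hand drivers) and on several feature flags. The sketch below shows the imports and flags the excerpt assumes; the module paths follow the director package layout and the flag values are placeholders, not taken from the original script.

# Assumed imports and feature flags for Example #2 (a sketch, not the original
# startup script; flag values are placeholders).
from director import cameraview
from director import handcontrolpanel
from director import objectmodel as om
from director import segmentation
from director import viewcolors
from director import visualization as vis

useOpenniDepthImage = True   # show the OpenNI depth point cloud
useGrid = True               # show a ground-plane grid
useLightColorScheme = True   # keep the light background
useHands = False             # hand control panel disabled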
Example #3
    def createPointerTracker():
        return trackers.PointerTracker(robotStateModel, disparityPointCloud)


if useOpenniDepthImage:
    openniDepthPointCloud = segmentation.DisparityPointCloudItem(
        'openni point cloud', 'OPENNI_FRAME', 'OPENNI_FRAME_LEFT',
        cameraview.imageManager)
    openniDepthPointCloud.addToView(view)
    om.addToObjectModel(openniDepthPointCloud,
                        parentObj=om.findObjectByName('sensors'))


if useGrid:
    grid = vis.showGrid(view, color=[0,0,0], alpha=0.1)
    grid.setProperty('Surface Mode', 'Surface with edges')

app.setBackgroundColor([0.3, 0.3, 0.35], [0.95,0.95,1])

viewOptions = vis.ViewOptionsItem(view)
om.addToObjectModel(viewOptions, parentObj=om.findObjectByName('sensors'))

viewBackgroundLightHandler = viewcolors.ViewBackgroundLightHandler(
    viewOptions, grid,
    app.getToolsMenuActions()['ActionToggleBackgroundLight'])
if not useLightColorScheme:
    viewBackgroundLightHandler.action.trigger()

if useHands:
    handcontrolpanel.init(lHandDriver, rHandDriver, robotStateModel,
                          robotStateJointController, view)
else:
    app.removeToolbarMacro('ActionHandControlPanel')

        chull.setProperty('Surface Mode', 'Surface with edges')
        chull.actor.GetProperty().SetLineWidth(3)

        center = segmentation.computeCentroid(chull.polyData)
        chullPoints = vnp.getNumpyFromVtk(chull.polyData, 'Points')
        d.addLine(plane.GetOrigin(),
                  np.array(plane.GetOrigin()) +
                  0.005 * np.array(plane.GetNormal()),
                  radius=0.0001,
                  color=[0, 0, 0])
        #d.addArrow(plane.GetOrigin(), np.array(plane.GetOrigin()) + 0.01 * np.array(plane.GetNormal()), headRadius=0.001, tubeRadius=0.0002)
        #d.addSphere(chullPoints[0], radius=0.001, color=[1,0,0])
        #d.addSphere(chullPoints[1], radius=0.001, color=[0,1,0])


    vis.showPolyData(d.getPolyData(), 'plane normals', colorByName='RGB255')

    #saveConvexHulls(chulls, name)
    #vis.showPolyData(ioUtils.readPolyData(os.path.join(name, 'merged_planes.ply')), 'merged_planes')


applogic.resetCamera([1,1,0])
applogic.setBackgroundColor([1,1,1])
view.orientationMarkerWidget().Off()
app.gridObj.setProperty('Color', [0,0,0])
app.gridObj.setProperty('Surface Mode', 'Surface with edges')

orbit = cameracontrol.OrbitController(view)
screenGrabberPanel = screengrabberpanel.ScreenGrabberPanel(view)

if app.getTestingInteractiveEnabled():
    view.show()
    view.resize(1280, 1024)
    app.start()