Example #1
def run(self):
    polyData = self.getPointCloud()
    segmentation.findHorizontalSurfaces(polyData,
        removeGroundFirst=True,
        showClusters=True,
        normalEstimationSearchRadius=self.properties.getProperty('Normal estimation search radius'),
        clusterTolerance=self.properties.getProperty('Cluster tolerance'),
        distanceToPlaneThreshold=self.properties.getProperty('Distance to plane threshold'),
        normalsDotUpRange=self.properties.getProperty('Normals dot up range'))
Example #2
def run(self):
    polyData = self.getPointCloud()
    segmentation.findHorizontalSurfaces(
        polyData,
        removeGroundFirst=True,
        showClusters=True,
        normalEstimationSearchRadius=self.properties.getProperty(
            'Normal estimation search radius'),
        clusterTolerance=self.properties.getProperty('Cluster tolerance'),
        distanceToPlaneThreshold=self.properties.getProperty(
            'Distance to plane threshold'),
        normalsDotUpRange=self.properties.getProperty(
            'Normals dot up range'))
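Examples #1 and #2 call segmentation.findHorizontalSurfaces with the same arguments; they differ only in how the long argument list is wrapped. The property names passed to getProperty have to be registered on the object beforehand. The sketch below shows what that registration could look like; it assumes Director's PropertySet-style addProperty API, and the default values are copied from the explicit findHorizontalSurfaces call in replanFootsteps further down, not from the original source of these examples.

def _addSegmentationProperties(self):
    # Hypothetical helper (not in the original examples): registers the
    # properties that run() reads via getProperty above.
    self.properties.addProperty('Normal estimation search radius', 0.05)
    self.properties.addProperty('Cluster tolerance', 0.025)
    self.properties.addProperty('Distance to plane threshold', 0.0025)
    self.properties.addProperty('Normals dot up range', [0.95, 1.0])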
Example #3
def processSnippet():

    obj = om.getOrCreateContainer('continuous')
    om.getOrCreateContainer('cont debug', obj)

    if (continuouswalkingDemo.processContinuousStereo):
        polyData = ioUtils.readPolyData(os.path.join(dataDir, 'terrain/block_snippet_stereo.vtp'))
        polyData = segmentation.applyVoxelGrid(polyData, leafSize=0.01)
    else:
        polyData = ioUtils.readPolyData(os.path.join(dataDir, 'terrain/block_snippet.vtp'))


    vis.updatePolyData(polyData, 'walking snapshot trimmed', parent='continuous')
    standingFootName = 'l_foot'

    standingFootFrame = robotStateModel.getLinkFrame(standingFootName)
    vis.updateFrame(standingFootFrame, standingFootName, parent='continuous', visible=False)

    # Step 2: find all the surfaces in front of the robot (about 0.75sec)
    clusters = segmentation.findHorizontalSurfaces(polyData)
    if clusters is None:
        print("No cluster found, stop walking now!")
        return

    # Step 3: find the corners of the minimum bounding rectangles
    blocks, groundPlane = cwdemo.extractBlocksFromSurfaces(clusters, standingFootFrame)

    footsteps = cwdemo.placeStepsOnBlocks(blocks, groundPlane, standingFootName, standingFootFrame)

    cwdemo.drawFittedSteps(footsteps)
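processSnippet() relies on names supplied by the surrounding application (dataDir, robotStateModel, cwdemo, continuouswalkingDemo). To run it as a standalone snippet it would additionally need roughly the following imports; this is a sketch assuming the Director package layout, not part of the original example.

import os

from director import ioUtils
from director import objectmodel as om
from director import segmentation
from director import visualization as vis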
Example #4

    def replanFootsteps(self, polyData, standingFootName, removeFirstLeftStep=True, doStereoFiltering=True, nextDoubleSupportPose=None):

        obj = om.getOrCreateContainer('continuous')
        om.getOrCreateContainer('cont debug', obj)

        vis.updatePolyData(polyData, 'walking snapshot', parent='cont debug', visible=False)

        standingFootFrame = self.robotStateModel.getLinkFrame(standingFootName)
        vis.updateFrame(standingFootFrame, standingFootName, parent='cont debug', visible=False)
        # TODO: remove the pitch and roll of this frame to support it being on uneven ground

        # Step 1: filter the data down to a box in front of the robot:
        polyData = self.getRecedingTerrainRegion(polyData, footstepsdriver.FootstepsDriver.getFeetMidPoint(self.robotStateModel))
        if (doStereoFiltering is True):
            # used for stereo data:
            polyData = segmentation.applyVoxelGrid(polyData, leafSize=0.01)
            polyData = segmentation.labelOutliers(polyData, searchRadius=0.06, neighborsInSearchRadius=15) # 0.06 and 10 originally
            vis.updatePolyData(polyData, 'voxel plane points', parent='cont debug', colorByName='is_outlier', visible=False)
            polyData = segmentation.thresholdPoints(polyData, 'is_outlier', [0, 0])
            vis.updatePolyData(polyData, 'walking snapshot trimmed', parent='cont debug', visible=True)

        # Step 2: find all the surfaces in front of the robot (about 0.75sec)
        clusters = segmentation.findHorizontalSurfaces(polyData, removeGroundFirst=False, normalEstimationSearchRadius=0.05,
                                                       clusterTolerance=0.025, distanceToPlaneThreshold=0.0025, normalsDotUpRange=[0.95, 1.0])
        if clusters is None:
            print("No cluster found, stop walking now!")
            return

        # Step 3: find the corners of the minimum bounding rectangles
        blocks, groundPlane = self.extractBlocksFromSurfaces(clusters, standingFootFrame)

        # Step 5: Find the two foot positions we should plan with: the next committed step and the current standing foot
        '''
        if (self.committedStep is not None):
          #print "i got a committedStep. is_right_foot?" , self.committedStep.is_right_foot
          if (self.committedStep.is_right_foot):
              standingFootTransform = self.robotStateModel.getLinkFrame(self.ikPlanner.leftFootLink)
              nextDoubleSupportPose = self.getNextDoubleSupportPose(standingFootTransform, self.committedStep.transform)
          else:
              standingFootTransform = self.robotStateModel.getLinkFrame(self.ikPlanner.rightFootLink)
              nextDoubleSupportPose = self.getNextDoubleSupportPose(self.committedStep.transform, standingFootTransform)

          comm_mesh,comm_color = self.getMeshAndColor(self.committedStep.is_right_foot)
          comm_color[1] = 0.75 ; comm_color[2] = 0.25
          stand_mesh, stand_color = self.getMeshAndColor( not self.committedStep.is_right_foot )
          stand_color[1] = 0.75 ; stand_color[2] = 0.25
          vis.updateFrame(self.committedStep.transform, 'committed foot', parent='foot placements', scale=0.2, visible=False)
          obj = vis.showPolyData(comm_mesh, 'committed step', color=comm_color, alpha=1.0, parent='steps')
          obj.actor.SetUserTransform(self.committedStep.transform)
          vis.updateFrame(standingFootTransform, 'standing foot', parent='foot placements', scale=0.2, visible=False)
          obj = vis.showPolyData(stand_mesh, 'standing step', color=stand_color, alpha=1.0, parent='steps')
          obj.actor.SetUserTransform(standingFootTransform)

        else:
            # don't have a committed footstep, assume we are standing still
            nextDoubleSupportPose = self.robotStateJointController.getPose('EST_ROBOT_STATE')
        '''


        self.displayExpectedPose(nextDoubleSupportPose)


        if not self.useManualFootstepPlacement and self.queryPlanner:
            footsteps = self.computeFootstepPlanSafeRegions(blocks, nextDoubleSupportPose, standingFootName)

        else:

            footsteps = self.placeStepsOnBlocks(blocks, groundPlane, standingFootName, standingFootFrame, removeFirstLeftStep)

            if not len(footsteps):
                return

            if self.queryPlanner:
                self.sendPlanningRequest(footsteps, nextDoubleSupportPose)
            else:
                self.drawFittedSteps(footsteps)
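A hypothetical call site for replanFootsteps, loading a terrain snapshot the same way processSnippet does above; the demo object name and the doStereoFiltering setting are assumptions, not part of the original example.

# 'demo' stands in for a ContinuousWalkingDemo-style instance that owns replanFootsteps.
polyData = ioUtils.readPolyData(os.path.join(dataDir, 'terrain/block_snippet.vtp'))
demo.replanFootsteps(polyData, 'l_foot', removeFirstLeftStep=True, doStereoFiltering=False)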