Example #1
def hexagonPIDPipieline(frame, cfg):

    mask = targetUtils.threshholdFrame(frame, cfg)

    # Slight renaming, for convention
    visImg = frame
    # TODO: Add bitwise and
    # -== Target Detection ==-
    hexagonTarget, visImg = targetUtils.findTarget(visImg, mask, cfg)

    # If we don't detect a target, drop out here
    if hexagonTarget is None:
        return (None, visImg)

    imgPts = targetUtils.target2pnp8Points(hexagonTarget, cfg)

    # -== PID Offset Value Calculation ==- #
    targetCenter = targetUtils.getTargetCenter(imgPts)

    yawOffset = targetUtils.getYawError(frame, targetCenter)

    return (yawOffset, visImg)
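
Example #1 leans entirely on the project's targetUtils helpers, which are not shown here. As a rough illustration of what threshholdFrame and getYawError are likely doing, below is a minimal, self-contained sketch using OpenCV; the HSV bounds, the horizontal field of view, and the helper names are assumptions for illustration, not the project's actual implementation.

# Illustrative sketch only -- HSV bounds and horizontal FOV are assumed values,
# not the project's real configuration.
import cv2
import numpy as np

def thresholdFrameSketch(frame, lower=(55, 100, 100), upper=(95, 255, 255)):
    # Convert to HSV and keep only pixels inside the (assumed) green target range
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    return cv2.inRange(hsv, np.array(lower), np.array(upper))

def yawErrorSketch(frame, targetCenter, hFovDegrees=60.0):
    # Horizontal offset of the target center from the image center,
    # scaled into degrees using an assumed horizontal field of view
    frameCenterX = frame.shape[1] / 2.0
    degreesPerPixel = hFovDegrees / frame.shape[1]
    return (targetCenter[0] - frameCenterX) * degreesPerPixel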
Example #2
def realPNP(frame, config):
    # TODO: Try to convert the Chameleon 'boundingbox' method to Python
    # frame --> visImg (used for drawing) and mask (used for target detection)
    robotPose = None

    # -== Frame Thresholding ==-
    mask = targetUtils.threshholdFrame(frame, config)

    # Slight renaming, for convention
    visImg = frame
    # TODO: Add bitwise and
    # -== Target Detection ==-
    hexagonTarget, visImg = targetUtils.findTarget(visImg, mask, config)

    # If we don't detect a target, drop out here
    if hexagonTarget is None:
        return (None, None, visImg)
    # -== Target Manipulation ==-
    # TODO: Pretty sure config is unnecessary here
    imgPts = targetUtils.target2pnp8Points(hexagonTarget, config)

    # -== PnP Offset Value Calculation ==- #
    targetCenter = targetUtils.getTargetCenter(imgPts)

    yawOffset = targetUtils.getYawError(frame, targetCenter)

    xlateVector, rotVec, visImg = poseEstimation.estimatePose(
        visImg, imgPts, config)

    # Debug
    #  - logging.debug("Translation Vector: {}".format(xlateVector))
    #  - logging.debug("Rotation Vector: {}".format(rotVec))

    # MATHUTIL MATRIX TRANSFORMATIONS GO HERE
    # All current state is available here through cfg -> state

    return (robotPose, yawOffset, visImg)
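
Example #2 hands the eight image points to poseEstimation.estimatePose and leaves the matrix transformations (and robotPose) as a TODO. A common way to implement that step is cv2.solvePnP against the known 3D geometry of the hexagonal target; the sketch below shows the general shape of that call, with made-up object points and camera intrinsics standing in for the project's calibration data.

# Illustrative sketch only -- the object points, camera matrix, and distortion
# coefficients below are placeholders, not the project's calibration data.
import cv2
import numpy as np

def estimatePoseSketch(imgPts):
    # 3D coordinates (in meters) of the same eight target corners, expressed in
    # the target's own coordinate frame; these values are assumed for illustration.
    objPts = np.array([
        [-0.50,  0.00, 0.0], [-0.25, -0.43, 0.0], [0.25, -0.43, 0.0], [0.50,  0.00, 0.0],
        [-0.44, -0.05, 0.0], [-0.22, -0.37, 0.0], [0.22, -0.37, 0.0], [0.44, -0.05, 0.0],
    ], dtype=np.float64)

    # Placeholder pinhole intrinsics; a real pipeline would load these from a
    # camera calibration step instead of hard-coding them.
    cameraMatrix = np.array([[700.0,   0.0, 320.0],
                             [  0.0, 700.0, 240.0],
                             [  0.0,   0.0,   1.0]])
    distCoeffs = np.zeros(5)

    ok, rotVec, xlateVector = cv2.solvePnP(
        objPts, np.asarray(imgPts, dtype=np.float64), cameraMatrix, distCoeffs)
    if not ok:
        return None, None
    # Translation and rotation of the target relative to the camera
    return xlateVector, rotVec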