Example #1
import os
import time

from torch import cuda

# The project helpers (get_work_paths, load_network, pickleSave, ...) and the
# module-level settings (formats, keepRate, ransacSettings, detectionSettings,
# aggregationSettings, logDir) are assumed to be defined elsewhere in the
# module this example is taken from.


def pipeline(sceneDir):

    # Load common values for this scene
    paths = get_work_paths(sceneDir)
    sceneName = get_scene_name(sceneDir)
    classNameToIdx, classIdxToName = create_class_idx_dict(paths['modelDir'])
    nClasses = len(classIdxToName)
    K = parse_inner_parameters(paths['cameraPath'])
    aggregationSettings['cameraMatrix'] = K

    # Load (and downsample) motion
    motionFull = parse_motion(
        os.path.join(paths['poseOutDir'], 'motionScaled.txt'))
    nCamerasFull = find_nbr_of_files(paths['rgbDir'], formats['rgbFormat'])
    assert (len(motionFull) == nCamerasFull)
    viewpointIdx = keep_every_nth(nCamerasFull, keepRate)
    motion = [motionFull[idx - 1] for idx in viewpointIdx]
    nCameras = len(motion)

    # Find instances and their poses for each class in this scene
    poses = []
    for iClass in range(1, nClasses + 1):

        # Add class network and keypoints paths to paths
        className = classIdxToName[iClass]
        print('Running for class ' + className)
        tStart = time.time()
        paths = get_work_paths(sceneDir, className, classNameToIdx)

        # Check if a network exists for the class. If not, don't predict any poses.
        if not os.path.isfile(paths['networkPath']):
            poses.append(None)
            print('No network found for class ' + className + '. Skipping this class.')
            continue

        # Create log directory for this scene/class
        if logDir is not None:
            thisLogDir = os.path.join(logDir, sceneName, 'Class' + str(iClass))
            os.makedirs(thisLogDir, exist_ok=True)
        else:
            thisLogDir = None

        # Load the 3D keypoints, shape [3, nKeypoints]
        points3D = parse_3D_keypoints(paths['keypointsPath'], addCenter=True)
        points3D = points3D.T  # Transpose to [nKeypoints, 3]

        # Load the network (each class has its own network)
        network = load_network(paths)

        # Predict the 2D keypoints in each viewpoint
        points2D, covariance = calculate_points_multiple_views(
            viewpointIdx,
            network,
            paths,
            formats,
            ransacSettings,
            detectionSettings,
            plotView=False,
            logDir=thisLogDir,
            verbose=True)
        cuda.empty_cache()  # Release cached GPU memory before the next class
        print("Finished class " + className +
              " after {} seconds".format(time.time() - tStart))

        # Finally, calculate poses
        aggregationSettings['classIdx'] = iClass
        if thisLogDir is not None:
            pickleSave((points2D, points3D, covariance, motion,
                        paths, aggregationSettings),
                       os.path.join(thisLogDir, 'state'))
        classPoses = estimate_pose_center_ms(
            points2D,
            points3D,
            covariance,
            motion,
            paths,
            aggregationSettings,
            plotCenters=False,
            logDir=thisLogDir)  # List of poses of length nInstances
        nClassInstances = len(classPoses)
        print('Detected {} instances of class {}.'.format(
            nClassInstances, className))
        poses.append(classPoses)
        if thisLogDir is not None:
            pickleSave(classPoses,
                       os.path.join(thisLogDir, 'finalClassPoses.pickle'))

    if logDir is not None:
        pickleSave(poses, os.path.join(logDir, sceneName, 'finalPoses.pickle'))
    # poses[i][j] is the pose of the j-th instance of class i+1;
    # poses[i] is None when no network exists for that class.
    return poses
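
A minimal usage sketch for the function above; the scene directory paths are hypothetical, and pipeline is assumed to be imported together with the module-level settings it relies on.

sceneDirs = ['/data/scenes/Scene1', '/data/scenes/Scene2']  # hypothetical paths

for sceneDir in sceneDirs:
    poses = pipeline(sceneDir)
    for i, classPoses in enumerate(poses):
        if classPoses is None:
            print('Class {}: no network available'.format(i + 1))
        else:
            print('Class {}: {} instance(s) detected'.format(i + 1, len(classPoses)))
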
Example #2
import os

# load_network, rangeObj, classIdxToName, paths and nImages are assumed to be
# defined earlier in the script this snippet is taken from.

# Pre-load the networks once, outside the per-image loop
networks = {}
for iClass in rangeObj:

    # Add class network and keypoints paths to paths
    className = classIdxToName[iClass]
    paths['networkPath'] = os.path.join(paths['networkDir'], className,
                                        className + 'Network.pth')
    paths['keypointsPath'] = os.path.join(paths['modelDir'],
                                          str(iClass) + '_keypoints.txt')

    # Check if a network exists for the class. If not, skip it.
    if not os.path.isfile(paths['networkPath']):
        continue

    network = load_network(paths)
    networks[iClass] = network

for iImage in range(1, nImages + 1):

    # Calculate network outputs for all classes in the image
    classMask = {}
    classAngles = {}
    for iClass in rangeObj:

        # Look up the pre-loaded network for this class; classes without a
        # network file were skipped above and are not in the dict.
        if iClass not in networks:
            continue
        network = networks[iClass]
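
If load_network does not already put the models in inference mode on the GPU, that can be done once for the pre-loaded networks, outside the per-image loop; a small sketch assuming load_network returns a torch.nn.Module:

import torch

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
for iClass in networks:
    # Move the weights and switch to eval mode once, instead of paying that
    # cost (or leaving train-mode layers active) inside the image loop.
    networks[iClass] = networks[iClass].to(device).eval()
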
Example #3
import time

# get_work_paths, find_nbr_of_files, load_network and load_image_batch are
# assumed to be imported from the project this snippet belongs to.

# Data
dataDir = '/var/www/webdav/Data/ICA/Scene1/pvnet'
className = 'tval'

# Implicit
paths = get_work_paths(dataDir, className)

# RUN PIPELINE
####################################

# Find number of rgb images in rgbDir
nImages = find_nbr_of_files(paths['rgbDir'], 'jpg')

# Regular forward loop
network = load_network(dataDir, className)

print('Forward propping one by one')
t = time.time()
for iRgb in range(nImages):
    # rgbIdx is fixed at 1, so the same image is reloaded in every iteration
    rgb = load_image_batch(paths['rgbDir'], rgbIdx=1, batchSize=1)
    segPred, verPred = network(rgb)
print('Elapsed time: {} seconds\n'.format(time.time() - t))  # ca 16 seconds

print('Forward propping in batches')
t = time.time()
batchSize = 16
for iRgb in range(nImages // batchSize):
    # Again rgbIdx is fixed, so the same batch of images is reloaded each time
    rgb = load_image_batch(paths['rgbDir'], rgbIdx=1, batchSize=batchSize)
    segPred, verPred = network(rgb)
print('Elapsed time: {} seconds'.format(time.time() - t))  # ca 9 seconds
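
Since the network is only used for inference here, the batched loop could also be run under torch.no_grad(), which skips building the autograd graph and usually reduces memory use and time. A sketch reusing the snippet's own load_image_batch, network and batchSize; the actual speedup is not measured here.

import torch

print('Forward propping in batches under torch.no_grad()')
t = time.time()
with torch.no_grad():  # no gradients are tracked during these forward passes
    for iRgb in range(nImages // batchSize):
        rgb = load_image_batch(paths['rgbDir'], rgbIdx=1, batchSize=batchSize)
        segPred, verPred = network(rgb)
print('Elapsed time: {} seconds'.format(time.time() - t))
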