Code Example #1
def main():
    # Look at resize_op/resize_op.cpp to start this tutorial.

    db = Database()

    cwd = os.path.dirname(os.path.abspath(__file__))
    if not os.path.isfile(os.path.join(cwd,
                                       'resize_op/build/libresize_op.so')):
        print(
            'You need to build the custom op first: \n'
            '$ pushd {}/resize_op; mkdir build && cd build; cmake ..; make; popd'
            .format(cwd))
        exit()

    # To load a custom op into the Scanner runtime, we use db.load_op to open the
    # shared library we compiled. If the op takes arguments, it also optionally
    # takes a path to the generated python file for the arg protobuf.
    db.load_op(os.path.join(cwd, 'resize_op/build/libresize_op.so'),
               os.path.join(cwd, 'resize_op/build/resize_pb2.py'))

    frame = db.sources.FrameColumn()
    # Then we use our op just like in the other examples.
    resize = db.ops.MyResize(frame=frame, width=200, height=300)
    output_op = db.sinks.Column(columns={'resized_frame': resize})
    job = Job(op_args={
        frame: db.table('example').column('frame'),
        output_op: 'example_resized',
    })
    db.run(output_op, [job], force=True)
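
Note: the snippet above assumes a table named 'example' already exists. A minimal sketch of how it could be created beforehand and how the output could be read back afterwards, assuming Scanner's standard ingest/load APIs (the video filename is a placeholder):

# Hypothetical setup: ingest a local video as the 'example' table.
db.ingest_videos([('example', 'example.mp4')], force=True)

# After db.run completes, the resized frames can be pulled back out:
resized_frames = list(db.table('example_resized').column('resized_frame').load())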
Code Example #2
File: segment_cpp.py, Project: sjoerdapp/scannerapps
else:
    db = Database()

cwd = os.path.dirname(os.path.abspath(__file__))
if not os.path.isfile(os.path.join(cwd, 'segment_op/build/libsegment_op.so')):
    print(
        'You need to build the custom op first: \n'
        '$ pushd {}/segment_op; mkdir build && cd build; cmake ..; make; popd'
        .format(cwd))
    exit()

# To load a custom op into the Scanner runtime, we use db.load_op to open the
# shared library we compiled. If the op takes arguments, it also optionally
# takes a path to the generated python file for the arg protobuf.
if opt.cloud:
    db.load_op('/app/segment_op/build/libsegment_op.so',
               os.path.join(cwd, 'segment_op/build/segment_pb2.py'))
else:
    db.load_op(os.path.join(cwd, 'segment_op/build/libsegment_op.so'),
               os.path.join(cwd, 'segment_op/build/segment_pb2.py'))

model_path = '/home/krematas/code/scannerapps/model.yml.gz'

config = db.config.config['storage']
params = {
    'bucket': opt.bucket,
    'storage_type': config['type'],
    'endpoint': 'storage.googleapis.com',
    'region': 'US'
}

encoded_image = db.sources.Files(**params)
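
The excerpt stops right after the source is declared. Following the op_args pattern shown in Example #5, the per-job file list is bound together with the same storage parameters when the Job is built. A hedged sketch; image_files, the intermediate graph, and the output table name are hypothetical:

# Sketch only: the real op graph between decode and sink is truncated
# out of the excerpt above.
frame = db.ops.ImageDecoder(img=encoded_image)
output_op = db.sinks.FrameColumn(columns={'frame': frame})
job = Job(op_args={
    encoded_image: {'paths': image_files, **params},
    output_op: 'segment_output',
})
db.run(output_op, [job], force=True)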
Code Example #3
        db = Database()

    cwd = os.path.dirname(os.path.abspath(__file__))
    # cwd = '/home/krematas/code/scannerapps/soccer/instance_segmentation/'
    if not os.path.isfile(os.path.join(cwd, 'edges_op/build/libedges_op.so')):
        print(
            'You need to build the custom op first: \n'
            '$ pushd {}/edges_op; mkdir build && cd build; cmake ..; make; popd'
            .format(cwd))
        exit()

    # To load a custom op into the Scanner runtime, we use db.load_op to open the
    # shared library we compiled. If the op takes arguments, it also optionally
    # takes a path to the generated python file for the arg protobuf.
    if opt.cloud:
        db.load_op('/app/edges_op/build/libedges_op.so',
                   os.path.join(cwd, 'edges_op/build/edges_pb2.py'))
    else:
        db.load_op(os.path.join(cwd, 'edges_op/build/libedges_op.so'),
                   os.path.join(cwd, 'edges_op/build/edges_pb2.py'))

    config = db.config.config['storage']
    params = {
        'bucket': opt.bucket,
        'storage_type': config['type'],
        'endpoint': 'storage.googleapis.com',
        'region': 'US'
    }

    encoded_image = db.sources.Files(**params)

    output_op = db.sinks.FrameColumn(columns={'image': encoded_image})
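
Examples #2 through #4 repeat the same cloud-versus-local branch around db.load_op. A small hypothetical helper (not part of the original project) that captures the convention these snippets follow:

def load_custom_op(db, cwd, name, cloud=False):
    """Load lib<name>.so plus its generated protobuf module.

    In the cloud image the ops live under /app; local builds sit
    next to the calling script, mirroring the branches above.
    """
    so_path = ('/app/{0}/build/lib{0}.so'.format(name) if cloud else
               os.path.join(cwd, '{0}/build/lib{0}.so'.format(name)))
    proto_path = os.path.join(
        cwd, '{0}/build/{1}_pb2.py'.format(name, name.replace('_op', '')))
    db.load_op(so_path, proto_path)

With it, the branch above collapses to load_custom_op(db, cwd, 'edges_op', cloud=opt.cloud).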
Code Example #4
        print('db was created.')
    else:
        db = Database()

    cwd = os.path.dirname(os.path.abspath(__file__))
    if not os.path.isfile(
            os.path.join(cwd, 'instancesegm_op/build/libinstancesegm_op.so')):
        print(
            'You need to build the custom op first: \n'
            '$ pushd {}/instancesegm_op; mkdir build && cd build; cmake ..; make; popd'
            .format(cwd))
        exit()

    if opt.cloud:
        db.load_op(
            '/app/instancesegm_op/build/libinstancesegm_op.so',
            os.path.join(cwd, 'instancesegm_op/build/instancesegm_pb2.py'))
    else:
        db.load_op(
            os.path.join(cwd, 'instancesegm_op/build/libinstancesegm_op.so'),
            os.path.join(cwd, 'instancesegm_op/build/instancesegm_pb2.py'))

    config = db.config.config['storage']
    params = {
        'bucket': opt.bucket,
        'storage_type': config['type'],
        'endpoint': 'storage.googleapis.com',
        'region': 'US'
    }

    print(db.summarize())
Code Example #5
File: calibrate.py, Project: sjoerdapp/scannerapps
    ]
    indices = [i for i in range(0, len(image_files_all), opt.framestep)]

    if image_files_all[-1] not in image_files:
        image_files.append(image_files_all[-1])
        mask_files.append(mask_files_all[-1])
        indices.append(len(image_files_all) - 1)

    encoded_image = db.sources.Files(**params)
    frame_img = db.ops.ImageDecoder(img=encoded_image)

    encoded_mask = db.sources.Files(**params)
    frame_mask = db.ops.ImageDecoder(img=encoded_mask)

    cwd = os.path.dirname(os.path.abspath(__file__))
    db.load_op(os.path.join(cwd, 'distancetr_op/build/libdistancetr_op.so'),
               os.path.join(cwd, 'distancetr_op/build/distance_pb2.py'))

    dist_transform_class = db.ops.DistanceTransform(frame=frame_img,
                                                    mask=frame_mask,
                                                    h=h,
                                                    w=w)
    output_op = db.sinks.FrameColumn(columns={'frame': dist_transform_class})

    job = Job(
        op_args={
            encoded_image: {
                'paths': image_files,
                **params
            },
            encoded_mask: {
                'paths': mask_files,
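
The excerpt is cut off inside the Job construction. Following the op_args pattern from Examples #1 and #6, the completed call plausibly continues as below; the output table name is hypothetical:

job = Job(
    op_args={
        encoded_image: {'paths': image_files, **params},
        encoded_mask: {'paths': mask_files, **params},
        output_op: 'distance_transform',  # hypothetical table name
    })
db.run(output_op, [job], force=True)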
Code Example #6
data = data[:10]
print(len(data))
db.new_table('test', ['img'], data, force=True)

img = db.sources.FrameColumn()

cwd = os.path.dirname(os.path.abspath(__file__))
if not os.path.isfile(os.path.join(cwd, 'resize_op/build/libresize_op.so')):
    print(
        'You need to build the custom op first: \n'
        '$ pushd {}/resize_op; mkdir build && cd build; cmake ..; make; popd'
        .format(cwd))
    exit()

# To load a custom op into the Scanner runtime, we use db.load_op to open the
# shared library we compiled. If the op takes arguments, it also optionally
# takes a path to the generated python file for the arg protobuf.
db.load_op(os.path.join(cwd, 'resize_op/build/libresize_op.so'),
           os.path.join(cwd, 'resize_op/build/resize_pb2.py'))

# Then we use our op just like in the other examples.
resize = db.ops.MyResize(frame=img, width=200, height=300)
output_op = db.sinks.Column(columns={'resized_frame': resize})
job = Job(
    op_args={
        img: db.table('test').column('img'),
        # encoded_image: {'paths': image_files, **params},
        output_op: 'example_resized',
    })
db.run(output_op, [job], force=True)
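
The `data` passed to db.new_table is built above the excerpt. Assuming the table expects one row per image, each row holding the encoded image bytes, a plausible reconstruction (the glob pattern is a placeholder):

import glob

# One row per image; each row is a list with the raw encoded bytes.
image_paths = sorted(glob.glob('images/*.jpg'))  # placeholder pattern
data = [[open(p, 'rb').read()] for p in image_paths]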
Code Example #7
    '--matching_packet_size', dest='matching_packet_size', type=int, default=25, help='the feature matching packet size')
arg_parser.add_argument('--cluster_size', dest='cluster_size',
                        default=10, help='the number of key images to use for each submodel', type=int)
arg_parser.add_argument('--cluster_overlap', dest='cluster_overlap', default=5,
                        type=int, help='the number of key images that adjacent submodels share')
arg_parser.add_argument('--merge_batch_size', dest='merge_batch_size',
                        type=int, help='the number of submodels to merge per batch')
args = arg_parser.parse_args()

db = Database(config_path=args.scanner_config)

################################################################################
# Load cpp ops
################################################################################
cwd = os.path.dirname(os.path.abspath(__file__))
db.load_op(
    os.path.join(cwd, 'op_cpp/build/libprepare_image.so'))
db.load_op(
    os.path.join(cwd, 'op_cpp/build/libextraction_op.so'),
    os.path.join(cwd, 'op_cpp/build/siftExtraction_pb2.py'))
db.load_op(
    os.path.join(cwd, 'op_cpp/build/libsequential_matching.so'),
    os.path.join(cwd, 'op_cpp/build/colmap_pb2.py'))
db.load_op(
    os.path.join(cwd, 'op_cpp/build/libincremental_mapping.so'))
db.load_op(
    os.path.join(cwd, 'op_cpp/build/libmerge_mapping.so'))

################################################################################
# prepare images
################################################################################
image_dir = os.path.expanduser(args.image_path)
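
The excerpt stops as the image directory is resolved; Example #14 below shows this preparation stage in full. Its image-listing step amounts to:

image_paths = sorted(
    os.path.join(image_dir, f) for f in os.listdir(image_dir))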
Code Example #8
    dest='overlap',
    type=int,
    default=10,
    help="the number of overlapping images to use for each reference image")
arg_parser.add_argument(
    '--output_table',
    dest='output_table',
    help='the name of the output table',
    default='patch_match')
args = arg_parser.parse_args()

db = Database(config_path=args.scanner_config)

cwd = os.path.dirname(os.path.abspath(__file__))

db.load_op(
    os.path.join(cwd, 'op_cpp/build/libpatch_match.so'))

num_images = db.table(args.input_table).num_rows()

R = db.sources.Column()
T = db.sources.Column()
K = db.sources.Column()
width = db.sources.Column()
height = db.sources.Column()
scan_width = db.sources.Column()
bitmap = db.sources.Column()
depth_min = db.sources.Column()
depth_max = db.sources.Column()
image_id = db.sources.Column()

def get_partition_ranges(num, step):
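
The excerpt ends at the helper's signature. The original body is not shown; a plausible implementation, assuming it tiles the table's rows into fixed-size work ranges, would be:

def get_partition_ranges(num, step):
    # Hypothetical sketch: tile [0, num) into consecutive
    # (start, end) windows of at most `step` rows.
    return [(i, min(i + step, num)) for i in range(0, num, step)]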
Code Example #9
    default=10,
    help='the matching window size',
    type=int)
arg_parser.add_argument(
    '--packet_size',
    dest='packet_size',
    type=int,
    default=25,
    help='the number of frames to dispatch to each feature matching kernel')
args = arg_parser.parse_args()

db = Database(config_path=args.scanner_config)

cwd = os.path.dirname(os.path.abspath(__file__))
db.load_op(
    os.path.join(cwd, 'op_cpp/build/libsequential_matching.so'),
    os.path.join(cwd, 'op_cpp/build/colmap_pb2.py'))

matching_stencil = range(0, args.overlap)

image_ids = db.sources.Column()
keypoints = db.sources.Column()
descriptors = db.sources.Column()
camera = db.sources.Column()

pair_image_ids, two_view_geometries = db.ops.SequentialMatchingCPU(
    image_ids=image_ids,
    keypoints=keypoints,
    descriptors=descriptors,
    stencil=matching_stencil)
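
The stencil argument hands each SequentialMatchingCPU invocation a sliding window of args.overlap rows instead of a single row. Persisting the results would follow the same sink/job shape as the other examples; a sketch, assuming the inputs come from Example #12's 'extraction' table and with a hypothetical output table name:

output = db.sinks.Column(columns={
    'pair_image_ids': pair_image_ids,
    'two_view_geometries': two_view_geometries,
})
job = Job(op_args={
    image_ids: db.table('extraction').column('image_id'),
    keypoints: db.table('extraction').column('keypoints'),
    descriptors: db.table('extraction').column('descriptors'),
    camera: db.table('extraction').column('camera'),
    output: 'matching',  # hypothetical table name
})
db.run(output, [job], force=True)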
Code Example #10
)
arg_parser.add_argument('--num_reg_images',
                        dest='num_reg_images',
                        type=int,
                        help="number of registered images in reconstruction")
arg_parser.add_argument('--output_table',
                        dest='output_table',
                        help='the name of the output table',
                        default='prepare_patch_match')
args = arg_parser.parse_args()

db = Database(config_path=args.scanner_config)

cwd = os.path.dirname(os.path.abspath(__file__))

db.load_op(os.path.join(cwd, 'op_cpp/build/libprepare_patch_match.so'),
           os.path.join(cwd, 'op_cpp/build/prepare_patch_match_pb2.py'))

image_id = db.sources.Column()

# image_id_repeated = db.streams.RepeatNull(image_id, args.num_reg_images)
image_id_sampled = db.streams.Range(input=image_id,
                                    start=0,
                                    end=args.num_reg_images)
R, T, K, width, height, scan_width, bitmap, depth_min, depth_max, image_id_out = db.ops.PreparePatchMatch(
    sparse_reconstruction_path=args.input_path,
    batch=args.num_reg_images,
    image_id=image_id_sampled)

output = db.sinks.Column(
    columns={
        'R': R,
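
The sink's column mapping is truncated. Since the ten outputs unpacked from PreparePatchMatch line up with the source columns consumed in Example #8, the full mapping is presumably:

output = db.sinks.Column(
    columns={
        'R': R, 'T': T, 'K': K,
        'width': width, 'height': height,
        'scan_width': scan_width, 'bitmap': bitmap,
        'depth_min': depth_min, 'depth_max': depth_max,
        'image_id': image_id_out,
    })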
Code Example #11
                        help='the path to the scanner config file')
arg_parser.add_argument('--input_table',
                        dest='input_table',
                        default='submodels',
                        help='the input table where submodels are stored')
arg_parser.add_argument('--output_table',
                        dest='output_table',
                        help='the name of the output table',
                        default='models')
args = arg_parser.parse_args()

db = Database(config_path=args.scanner_config)

cwd = os.path.dirname(os.path.abspath(__file__))

db.load_op(os.path.join(cwd, 'op_cpp/build/libmerge_mapping.so'),
           os.path.join(cwd, 'op_cpp/build/merge_mapping_pb2.py'))

num_submodels = db.table(args.input_table).num_rows()
print("num submodels: %d" % num_submodels)

cluster_id_src = cluster_id = db.sources.Column()
cameras_src = cameras = db.sources.Column()
images_src = images = db.sources.Column()
points3d_src = points3d = db.sources.Column()

cluster_id, cameras, images, points3d = db.ops.MergeMappingCPU(
    cluster_id=cluster_id,
    cameras=cameras,
    images=images,
    points3d=points3d,
    num_models=num_submodels)
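
The double assignments such as cluster_id_src = cluster_id = db.sources.Column() keep a handle on the raw source: the short name is rebound to the op's output above, while the *_src handle is still needed to bind the input table in the Job. A sketch of that wiring; the input table's column names are assumptions:

output = db.sinks.Column(columns={
    'cluster_id': cluster_id,
    'cameras': cameras,
    'images': images,
    'points3d': points3d,
})
job = Job(op_args={
    cluster_id_src: db.table(args.input_table).column('cluster_id'),
    cameras_src: db.table(args.input_table).column('cameras'),
    images_src: db.table(args.input_table).column('images'),
    points3d_src: db.table(args.input_table).column('points3d'),
    output: args.output_table,
})
db.run(output, [job], force=True)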
Code Example #12
arg_parser.add_argument('--output_table',
                        dest='output_table',
                        help='the name of the output table',
                        default='extraction')
arg_parser.add_argument(
    '--packet_size',
    dest='packet_size',
    type=int,
    default=25,
    help='the number of frames to dispatch to each extraction kernel')
args = arg_parser.parse_args()

db = Database(config_path=args.scanner_config)

cwd = os.path.dirname(os.path.abspath(__file__))
db.load_op(os.path.join(cwd, 'op_cpp/build/libextraction_op.so'),
           os.path.join(cwd, 'op_cpp/build/siftExtraction_pb2.py'))

image_ids = db.sources.Column()
frames = db.sources.FrameColumn()

# run SIFT extractions
keypoints, descriptors, cameras = db.ops.SiftExtraction(image_ids=image_ids,
                                                        frames=frames)

output = db.sinks.Column(
    columns={
        'image_id': image_ids,
        'keypoints': keypoints,
        'descriptors': descriptors,
        'camera': cameras
    })
Code Example #13
    help='the number of key images to use for each cluster',
    type=int)
arg_parser.add_argument(
    '--cluster_overlap',
    dest='cluster_overlap',
    default=5,
    type=int,
    help='the number of key images that adjacent cluster share')
args = arg_parser.parse_args()

db = Database(config_path=args.scanner_config)

cwd = os.path.dirname(os.path.abspath(__file__))

db.load_op(
    os.path.join(cwd, 'op_cpp/build/libincremental_mapping.so'),
    os.path.join(cwd, 'op_cpp/build/incremental_mapping_pb2.py'))

step_size = args.cluster_size - args.cluster_overlap

matching_stencil = range(0, args.matching_overlap + args.cluster_size)
num_images = db.table(args.extraction_table).num_rows()

image_ids = db.sources.Column()
pair_image_ids = db.sources.Column()
two_view_geometries = db.sources.Column()
keypoints = db.sources.Column()
camera = db.sources.Column()

cluster_id, cameras, images, points3d = db.ops.IncrementalMappingCPU(
    image_id=image_ids,
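
The call is truncated after its first keyword. Given the five source columns declared above and the stencil computed from the cluster parameters, the remainder plausibly reads as follows; the keyword names are inferred, not confirmed:

# Keyword names below are assumptions based on the source columns above.
cluster_id, cameras, images, points3d = db.ops.IncrementalMappingCPU(
    image_id=image_ids,
    pair_image_ids=pair_image_ids,
    two_view_geometries=two_view_geometries,
    keypoints=keypoints,
    camera=camera,
    stencil=matching_stencil)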
Code Example #14
    '--image_path',
    dest='image_path',
    required=True,
    help='the path to the images; they should be named to reflect their '
    'order, e.g. image_01.JPG, image_02.JPG')
arg_parser.add_argument('--output_table',
                        dest='output_table',
                        help='the name of the output table',
                        default='frames')
args = arg_parser.parse_args()

db = Database(config_path=args.scanner_config)
cwd = os.path.dirname(os.path.abspath(__file__))

db.load_op(os.path.join(cwd, 'op_cpp/build/libprepare_image.so'))

image_dir = os.path.expanduser(args.image_path)
image_paths = []

for filename in os.listdir(image_dir):
    image_paths.append(os.path.join(image_dir, filename))

image_paths.sort()

files = db.sources.Files()
frames = db.ops.ImageDecoder(img=files)

# generate image ids
image_ids = db.ops.PrepareImage(frames=frames)
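
The excerpt ends once image ids are generated. Completing the stage would follow the same sink/job boilerplate as Example #1; a sketch, with hypothetical column names:

output = db.sinks.FrameColumn(columns={'frame': frames,
                                       'image_id': image_ids})
job = Job(op_args={
    files: {'paths': image_paths},
    output: args.output_table,
})
db.run(output, [job], force=True)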