Example No. 1
def half_incr():
    # Incremental setup: each stage trains on the newly added classes only
    # but evaluates on all classes seen so far.
    classes = get_classes()
    included = []
    num_classes = 0
    for c in classes:
        included += c
        num_classes = len(included)
        print("{} classes: {}".format(num_classes, c))
        root, annFile = getds('train2014')
        dataset = COCOLoader(root, annFile, included=c)  # new classes only
        root, annFile = getds('val2014')
        dataset_test = COCOLoader(root, annFile, included=included)  # all seen so far

        print('data prepared, train data: {}'.format(len(dataset)))
        print('data prepared, test data: {}'.format(len(dataset_test)))

        yield num_classes, dataset, dataset_test
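
The generator above depends on a get_classes() helper that is not shown. A plausible reconstruction, given that the offline() variant in Example No. 3 below hard-codes the same 81 labels (80 COCO categories plus, presumably, a background label) in two halves, might look like this; treat it as an assumption, not the original definition:

def get_classes():
    # Hypothetical helper: split the 81 labels into two halves, mirroring
    # the hard-coded split used by offline() below.
    return [list(range(0, 41)), list(range(41, 81))]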
Example No. 2
def create_coco_dataset(split):
    loader = COCOLoader(split)
    filenames = loader.get_filenames()
    sz = len(filenames)
    print("Contains %i files" % sz)

    if split in ['minival2014', 'valminusminival2014']:
        realsplit = 'val2014'
    else:
        realsplit = split

    shard_size = 5000
    num_shards = ceil(sz / shard_size)
    print("Splitting into %i shards" % num_shards)
    image_placeholder = tf.placeholder(dtype=tf.uint8)
    encoded_image = tf.image.encode_png(tf.expand_dims(image_placeholder, 2))
    with tf.Session() as sess:
        for shard in range(num_shards):
            print("Shard %i/%i is starting" % (shard, num_shards))
            output_file = os.path.join(DATASETS_ROOT, 'coco-seg-%s-%.5d-of-%.5d' % (split, shard, num_shards))
            writer = tf.python_io.TFRecordWriter(output_file)

            for i in range(shard*shard_size, min(sz, (shard+1)*shard_size)):
                f = filenames[i]
                img = loader.coco.loadImgs(f)[0]
                path = '%simages/%s/%s' % (loader.root, realsplit, img['file_name'])
                with tf.gfile.FastGFile(path, 'rb') as ff:
                    image_data = ff.read()
                gt_bb, gt_cats, w, h, diff = loader.read_annotations(f)
                gt_bb = normalize_bboxes(gt_bb, w, h)

                segmentation = np.zeros((h, w), dtype=np.uint8)
                coco_anns = loader._get_coco_annotations(f, only_instances=False)
                instances = []  # flattened per-instance masks, one (h*w) block per annotation
                for num_ann, ann in enumerate(coco_anns):
                    mask = loader._read_segmentation(ann, h, w)
                    cid = loader.coco_ids_to_internal[ann['category_id']]
                    assert mask.shape == segmentation.shape
                    segmentation[mask > 0] = cid

                    instance = np.zeros((h, w), dtype=np.uint8)
                    instance[mask > 0] = cid
                    instances.extend(instance.reshape(-1))  # flatten and append this instance's mask

                png_string = sess.run(encoded_image,
                                      feed_dict={image_placeholder: segmentation})
                example = _convert_to_example(path, image_data, gt_bb, gt_cats, diff, png_string,
                                              instances, h, w)
                if i % 100 == 0:
                    print("Processed %i files" % i)
                writer.write(example.SerializeToString())

            writer.close()
    print("Done")
Example No. 3
def offline():
    # Offline setup: the 81 labels split into a first half and a last half;
    # each stage trains and evaluates on all classes included so far.
    classes = [list(range(0, 41)), list(range(41, 81))]
    included = []
    num_classes = 0

    for c in classes:
        included += c
        num_classes = len(included)
        print("{} classes: {}".format(num_classes, c))
        root, annFile = getds('train2014')
        dataset = COCOLoader(root, annFile, included=included)
        root, annFile = getds('val2014')
        dataset_test = COCOLoader(root, annFile, included=included)

        print('data prepared, train data: {}'.format(len(dataset)))
        print('data prepared, test data: {}'.format(len(dataset_test)))

        yield num_classes, dataset, dataset_test
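
Both half_incr() and offline() yield (num_classes, dataset, dataset_test) stages, so a caller can treat them interchangeably. A minimal sketch of a consumer; the driver function is a placeholder, not part of the original code:

def run_stages(stage_gen):
    # Hypothetical driver: iterate the stages and report dataset sizes.
    # A real pipeline would train on `dataset` and score `dataset_test` here.
    for num_classes, dataset, dataset_test in stage_gen:
        print('stage with {} classes: {} train / {} test samples'.format(
            num_classes, len(dataset), len(dataset_test)))

run_stages(offline())  # or run_stages(half_incr())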
Example No. 4
def main(argv=None):  # pylint: disable=unused-argument
    assert args.ckpt > 0 or args.batch_eval
    assert args.detect or args.segment, "Either detect or segment should be True"
    if args.trunk == 'resnet50':
        net = ResNet
        depth = 50
    elif args.trunk == 'resnet101':
        net = ResNet
        depth = 101
    elif args.trunk == 'vgg16':
        net = VGG
        depth = 16
    else:
        raise ValueError("Unsupported trunk: %s" % args.trunk)

    net = net(config=net_config, depth=depth, training=False)

    if args.dataset == 'voc07' or args.dataset == 'voc07+12':
        loader = VOCLoader('07', 'test')
    elif args.dataset == 'voc12':
        loader = VOCLoader('12', 'val', segmentation=args.segment)
    elif args.dataset == 'coco':
        loader = COCOLoader(args.split)
    else:
        raise ValueError("Unsupported dataset: %s" % args.dataset)

    with tf.Session(config=tf.ConfigProto(allow_soft_placement=True,
                                          log_device_placement=False)) as sess:
        detector = Detector(sess,
                            net,
                            loader,
                            net_config,
                            no_gt=args.no_seg_gt)
        if args.dataset == 'coco':
            tester = COCOEval(detector, loader)
        else:
            tester = Evaluation(detector,
                                loader,
                                iou_thresh=args.voc_iou_thresh)
        if not args.batch_eval:
            detector.restore_from_ckpt(args.ckpt)
            tester.evaluate_network(args.ckpt)
        else:
            log.info('Evaluating %s' % args.run_name)
            ckpts_folder = CKPT_ROOT + args.run_name + '/'
            out_file = ckpts_folder + evaluation_logfile

            max_checked = get_last_eval(out_file)
            log.debug("Maximum checked ckpt is %i" % max_checked)
            with open(out_file, 'a') as f:
                start = max(args.min_ckpt, max_checked + 1)
                ckpt_files = glob(ckpts_folder + '*.data*')
                folder_has_nums = np.array(
                    list(map(filename2num, ckpt_files)), dtype=int)
                nums_available = sorted(
                    folder_has_nums[folder_has_nums >= start])
                nums_to_eval = [nums_available[-1]]  # assumes at least one unchecked ckpt
                for n in reversed(nums_available):
                    if nums_to_eval[-1] - n >= args.step:
                        nums_to_eval.append(n)
                nums_to_eval.reverse()

                for ckpt in nums_to_eval:
                    log.info("Evaluation of ckpt %i" % ckpt)
                    tester.reset()
                    detector.restore_from_ckpt(ckpt)
                    res = tester.evaluate_network(ckpt)
                    f.write(res)
                    f.flush()
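
The checkpoint-selection loop above is easy to misread: it keeps the newest checkpoint, then walks backwards, keeping one more each time the gap to the last kept checkpoint reaches args.step. A self-contained illustration with made-up numbers; the selection logic itself is copied from main() above:

nums_available = [5, 10, 15, 20, 25, 30]  # made-up checkpoint numbers
step = 10                                 # made-up args.step
nums_to_eval = [nums_available[-1]]
for n in reversed(nums_available):
    if nums_to_eval[-1] - n >= step:
        nums_to_eval.append(n)
nums_to_eval.reverse()
print(nums_to_eval)  # -> [10, 20, 30]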
Example No. 5
def main(argv=None):  # pylint: disable=unused-argument
    assert args.ckpt > 0 or args.batch_eval
    assert args.detect or args.segment, "Either detect or segment should be True"
    if args.trunk == 'resnet50':
        net = ResNet
        depth = 50
    elif args.trunk == 'resnet101':
        net = ResNet
        depth = 101
    elif args.trunk == 'vgg16':
        net = VGG
        depth = 16
    else:
        raise ValueError("Unsupported trunk: %s" % args.trunk)

    net = net(config=net_config, depth=depth, training=False)

    if args.dataset == 'voc07' or args.dataset == 'voc07+12':
        loader = VOCLoader('07', 'test')
    elif args.dataset == 'voc12':
        loader = VOCLoader('12', 'val', segmentation=args.segment)
    elif args.dataset == 'coco':
        loader = COCOLoader(args.split)
    else:
        raise ValueError("Unsupported dataset: %s" % args.dataset)

    with tf.Session(config=tf.ConfigProto(allow_soft_placement=True,
                                          log_device_placement=False,
                                          gpu_options=tf.GPUOptions(allow_growth=True,
                                                                    per_process_gpu_memory_fraction=0.2))) as sess:
        if args.use_profile:
            profiler = model_analyzer.Profiler(graph=sess.graph)
            detector = Detector(sess, net, loader, net_config,
                                no_gt=args.no_seg_gt, profiler=profiler)
        else:
            detector = Detector(sess, net, loader, net_config,
                                no_gt=args.no_seg_gt)

        if args.dataset == 'coco':
            tester = COCOEval(detector, loader)
        else:
            tester = Evaluation(detector, loader, iou_thresh=args.voc_iou_thresh)
        if not args.batch_eval:
            detector.restore_from_ckpt(args.ckpt)
            tester.evaluate_network(args.ckpt)
        else:
            log.info('Evaluating %s' % args.run_name)
            ckpts_folder = CKPT_ROOT + args.run_name + '/'
            out_file = ckpts_folder + evaluation_logfile

            max_checked = get_last_eval(out_file)
            log.debug("Maximum checked ckpt is %i" % max_checked)
            with open(out_file, 'a') as f:
                start = max(args.min_ckpt, max_checked+1)
                ckpt_files = glob(ckpts_folder + '*.data*')
                folder_has_nums = np.array(list(map(filename2num, ckpt_files)), dtype=int)
                nums_available = sorted(folder_has_nums[folder_has_nums >= start])
                nums_to_eval = [nums_available[-1]]  # assumes at least one unchecked ckpt
                for n in reversed(nums_available):
                    if nums_to_eval[-1] - n >= args.step:
                        nums_to_eval.append(n)
                nums_to_eval.reverse()

                for ckpt in nums_to_eval:
                    log.info("Evaluation of ckpt %i" % ckpt)
                    tester.reset()
                    detector.restore_from_ckpt(ckpt)
                    res = tester.evaluate_network(ckpt)
                    f.write(res)
                    f.flush()

        if args.use_profile:
            profile_scope_builder = option_builder.ProfileOptionBuilder()
            profile_scope_builder.with_max_depth(4)
            profile_scope_builder.with_min_memory(int(2e6))
            profile_scope_builder.with_step(2)
            profile_scope_builder.select(['bytes'])
            detector.profiler.profile_name_scope(profile_scope_builder.build())
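
Both main() variants rely on a filename2num helper that is not shown. Assuming the usual TensorFlow checkpoint naming (e.g. model.ckpt-12000.data-00000-of-00001), a hypothetical reconstruction could be:

import re

def filename2num(path):
    # Hypothetical implementation: extract the global step from a checkpoint
    # data filename; returns -1 if the expected pattern is absent.
    match = re.search(r'ckpt-(\d+)\.data', path)
    return int(match.group(1)) if match else -1

print(filename2num('runs/x/model.ckpt-12000.data-00000-of-00001'))  # -> 12000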