# Example #1 (score: 0)
def main():
    """Dehaze a single image with the dark-channel-prior pipeline.

    Reads pipeline constants from the YAML file given by ``args.constants``,
    loads ``args.input`` with OpenCV, runs dark-channel / airlight /
    transmission-map estimation, and displays the original next to the
    dehazed result.
    """
    args = parser.parse_args()
    with open(args.constants, 'r') as f:
        # safe_load: never construct arbitrary Python objects from a
        # YAML file (yaml.load without a Loader is unsafe and deprecated).
        constants = bunchify(yaml.safe_load(f))

    # Lazy %-args are the logging-module convention (formatting is skipped
    # when the level is disabled).
    logger.info("Loading image %s ...", args.input)
    img = cv2.imread(args.input, flags=cv2.IMREAD_COLOR)
    # image scaled in 0-1 range
    img = img / 255.0

    logger.info("Generating dark channel prior ...")
    dark_channel = steps.generate_dark_channel(img, constants)

    logger.info("Estimating airlight ...")
    airlight = steps.estimate_airlight(img, dark_channel, constants)

    logger.info("Estimated airlight is %s", str(airlight))

    logger.info("Estimating transmission map ...")
    tmap = steps.estimate_tmap(dark_channel, constants)

    logger.info("Smooth transmission map ...")
    tmap = steps.smooth_tmap(img, tmap, constants)

    logger.info("Dehazing image ...")
    dehazed = steps.dehaze(img, airlight, tmap, constants)

    tools.show_img([img, dehazed])
# Example #2 (score: 0)
def main():
    """Entry point: parse args, seed all RNGs, load config, then train or test."""
    args = parser.parse_args()
    modify_arguments(args)

    # Resetting the graph and setting seeds for reproducible runs
    tf.reset_default_graph()
    tf.set_random_seed(args.seed)
    np.random.seed(args.seed)

    with open(args.config_file, 'r') as stream:
        # safe_load: a config file must not be able to construct arbitrary
        # Python objects (yaml.load without a Loader is unsafe/deprecated).
        args.config = bunchify(yaml.safe_load(stream))

    logger.info(args)

    # Any mode other than 'train' falls through to test().
    if args.mode == 'train':
        train(args)
    else:
        test(args)
def main():
    """The main method of script.

    Loads the YAML config, derives per-job train/best directories, and
    dispatches to train / test / generate / beam-search based on ``args.mode``.
    """
    args = parser.parse_args()
    with open(args.config_file, 'r') as stream:
        # safe_load: config parsing must not construct arbitrary Python
        # objects (yaml.load without a Loader is unsafe and deprecated).
        args.config = bunchify(yaml.safe_load(stream))
    args.train_dir = os.path.join(args.train_dir, args.job_id)
    args.best_dir = os.path.join(args.best_dir, args.job_id)
    # exist_ok avoids the check-then-create race of the exists()/makedirs pair.
    os.makedirs(args.train_dir, exist_ok=True)
    os.makedirs(args.best_dir, exist_ok=True)
    logger.info(args)
    if args.mode == 'train':
        train(args)
    elif args.mode in ('test', 'valid'):
        test(args)
    elif args.mode == 'generate':
        generate(args)
    elif args.mode == 'beam':
        diverse_beam_search(args)
# Example #4 (score: 0)
def main():
    """Entry point: parse args, seed all RNGs, load config, then dispatch by mode."""
    args = parser.parse_args()
    modify_arguments(args)

    # Resetting the graph and setting seeds for reproducible runs
    tf.reset_default_graph()
    tf.set_random_seed(args.seed)
    np.random.seed(args.seed)
    random.seed(args.seed)

    with open(args.config_file, 'r') as stream:
        # safe_load: config parsing must not construct arbitrary Python
        # objects (yaml.load without a Loader is unsafe and deprecated).
        config = yaml.safe_load(stream)
        args.config = Munch(modify_config(args, config))

    logger.info(args)

    if args.mode == 'train':
        train(args)
    elif args.mode == 'test':
        test(args)
    elif args.mode == 'analysis':
        analysis(args)
# Example #5 (score: 0)
def main():
    """Dehaze an image via patch-pair airlight estimation.

    Pipeline: load constants and image, build a decreasing scale pyramid,
    extract/filter patch pairs (optionally from a cache), estimate the global
    airlight from the surviving pairs, then estimate the transmission map and
    save the original next to the dehazed output.
    """
    args = parser.parse_args()
    with open(args.constants, 'r') as f:
        # safe_load: never construct arbitrary Python objects from YAML
        # (yaml.load without a Loader is unsafe and deprecated).
        constants = bunchify(yaml.safe_load(f))

    # Lazy %-args are the logging-module convention.
    logger.info("Loading image %s ...", args.input)
    img = cv2.imread(args.input, flags=cv2.IMREAD_COLOR)
    # image scaled in 0-1 range
    img = img / 255.0

    # Scale array must be in decreasing order
    scaled_imgs = steps.scale(
        img,
        [1, 300.0 / 384, 200.0 / 384, 150.0 / 384, 120.0 / 384, 100.0 / 384])

    if not args.no_cache:
        patches, pairs = load(args.input)
    else:
        patches, pairs = None, None
    if patches is None and pairs is None:
        logger.info("Extracting alternate patches ...")
        patches = steps.generate_patches(scaled_imgs, constants, False)

        print("\nNumber of patches extracted per scaled image")
        # list(...) is required on Python 3, where map() is a lazy iterator
        # and would otherwise print as "<map object ...>".
        print(list(map(len, patches)))

        logger.info("Smoothening std deviations of patches ...")
        steps.smoothen(scaled_imgs, patches, constants)

        logger.info("Putting patches in buckets ...")
        steps.set_patch_buckets(patches, constants)

        logger.info("Generating pairs of patches ...")
        pairs = steps.generate_pairs(patches, constants)

        print("\nNumber of pairs generated using generate_pairs")
        print(len(pairs))

        # logger.info("Saving patches and pairs ...")
        # save(args.input, patches, pairs)
    else:
        logger.info("Using saved patches and pairs ...")

    logger.info("Filtering pairs for checking normalized correlation ...")
    pairs = steps.filter_pairs(patches, pairs, constants, all_pairs=False)

    print("\nNumber of pairs retained after filtering")
    print(len(pairs))

    logger.info("Removing outliers ...")
    pairs = steps.remove_outliers(pairs, constants)

    print("\nNumber of pairs retained after removing outliers")
    print(len(pairs))

    logger.info("Estimating global airlight ...")
    airlight = steps.estimate_airlight(pairs)

    logger.info("Estimated airlight is ...%s", str(airlight))

    # T-map estimation code begins

    # Free the scaled-image patches before extracting the full patch set,
    # to keep peak memory down.
    del patches
    gc.collect()

    logger.info("Extracting ALL patches ...")
    patches = steps.generate_patches([img], constants, True)

    logger.info("Estimating t-map ...")
    dehazed = tmap_steps.estimate_tmap(img, patches, pairs, airlight,
                                       constants)

    logger.info("Displaying dehazed output image ...")
    # Trim a 7-pixel border so the original matches the dehazed output's
    # size — presumably patch extraction drops a partial boundary; confirm.
    h, w = len(img), len(img[0])
    img = img[0:h - 7, 0:w - 7, :]
    tools.save_img([img, dehazed])
# Example #6 (score: 0)
def main():
    """Dehaze an image by estimating the global airlight from patch pairs.

    NOTE(review): this definition appears to be truncated here — the final
    log line reports the airlight but the value/output steps are not visible.
    """
    args = parser.parse_args()
    with open(args.constants, 'r') as f:
        # NOTE(review): yaml.load without an explicit Loader is deprecated
        # and unsafe on untrusted input — consider yaml.safe_load.
        constants = bunchify(yaml.load(f))

    logger.info("Loading image %s ..." % args.input)
    img = cv2.imread(args.input, flags=cv2.IMREAD_COLOR)
    # image scaled in 0-1 range
    img = img / 255.0
    # Scale array must be in decreasing order
    # scaled_imgs = steps.scale(img, [1, 0.75, 0.5, 0.375, 0.3, 0.25])
    scaled_imgs = steps.scale(
        img,
        [1, 300.0 / 384, 200.0 / 384, 150.0 / 384, 120.0 / 384, 100.0 / 384])

    # Reuse cached patches/pairs for this input unless caching is disabled.
    if not args.no_cache:
        patches, pairs = load(args.input)
    else:
        patches, pairs = None, None
    if patches is None and pairs is None:
        logger.info("Extracting patches ...")
        patches = steps.generate_patches(scaled_imgs, constants)

        logger.info("Generating pairs of patches ...")
        pairs = steps.generate_pairs(patches, constants)

        logger.info("Removing duplicates ...")
        pairs = steps.remove_duplicates(pairs)

        logger.info("Saving patches and pairs ...")
        save(args.input, patches, pairs)
    else:
        logger.info("Using saved patches and pairs ...")

    logger.info("Filtering pairs of patches and estimating local airlight ...")
    pairs = steps.filter_pairs(patches, pairs, constants)

    # Drop overlapping pairs before outlier removal.
    pairs2 = steps.remove_overlaps(pairs, constants)

    # NOTE(review): debug scaffolding left commented out below — candidates
    # for deletion.
    # k = 547
    # p1 = pairs2[k]
    # stats = []

    # for i, p2 in enumerate(pairs2):
    #     if i == k:
    #         continue
    #     if p2.first == p1.first:
    #         stats.append(p2.second)
    #     elif p2.second == p1.first:
    #         stats.append(p2.first)

    # sum1 = np.zeros((len(pairs), 3))
    # for i, pair in enumerate(pairs2):
    #     sum1[i] = pair.airlight
    # print np.mean(sum1, axis=0)

    logger.info("Removing outliers ...")
    pairs2 = steps.remove_outliers(pairs2, constants)

    logger.info("Estimating global airlight ...")
    # NOTE(review): estimate_airlight is fed `pairs`, not the filtered
    # `pairs2` computed just above — verify which set is intended.
    airlight = steps.estimate_airlight(pairs)

    # NOTE(review): typo "Estimatied" in the runtime log message (left as-is).
    logger.info("Estimatied airlight is ...")