Example #1
def texton_target():
    # Load the source image and precompute the band normalization statistics.
    image = Image('../source/section_2_sentinel.tif', 'quickbird')
    image.precompute_normalization()

    # Fit the texton k-means clusters and compute the per-pixel filter
    # responses for the image.
    clusters = texton_cluster([image])
    descriptors = get_texton_descriptors(image)

    # Restrict the computation to a small 25x25 pixel window.
    window = (slice(100, 125, 1), slice(100, 125, 1))

    win = descriptors[window]
    features = texton(win, clusters)

    # Store the reference feature values and the window bounds for the tests.
    write_target(features, '../target/texton.nc', 'texton', window)
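Example #2 below reads this target file back as a 'texton' variable plus six window values, so a write_target-style helper presumably stores exactly that. The sketch below is an assumption based on that read path, not the project's actual helper; the function and dimension names are illustrative.

from netCDF4 import Dataset
import numpy as np


def write_target_sketch(features, path, name, window):
    # Persist the feature array plus the (row, column) slice bounds so a test
    # can recompute the feature on exactly the same window later.
    rows, cols = window
    with Dataset(path, 'w', format='NETCDF4') as dataset:
        dims = []
        for i, size in enumerate(features.shape):
            dim_name = 'dim_{}'.format(i)
            dataset.createDimension(dim_name, size)
            dims.append(dim_name)

        variable = dataset.createVariable(name, features.dtype, tuple(dims))
        variable[:] = features

        dataset.createDimension('window_size', 6)
        window_var = dataset.createVariable('window', 'i8', ('window_size',))
        window_var[:] = np.array([rows.start, rows.stop, rows.step,
                                  cols.start, cols.stop, cols.step])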
Example #2
def test_texton(image):
    # Read the precomputed reference values and the window they were
    # computed on.
    dataset = Dataset("test/data/target/texton.nc", "r", format="NETCDF4")
    target = dataset.variables['texton'][:]

    slices = dataset.variables['window'][:]
    window = slice(*slices[0:3]), slice(*slices[3:6])

    # Recompute the texton feature on the same window.
    clusters = texton_cluster([image])
    descriptors = get_texton_descriptors(image)

    win = descriptors[window]
    features = texton(win, clusters)

    # The recomputed values must match the stored reference exactly.
    same = target == features

    assert same.all()
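The test receives its image argument from a fixture that is not shown here. A minimal sketch of such a pytest fixture, reusing the constructor call from Example #1; the file path, satellite name, and import path are illustrative assumptions.

import pytest

from satsense import Image  # import path assumed; adjust to the project layout


@pytest.fixture
def image():
    # Same kind of image object as in Example #1, with normalization
    # statistics precomputed up front.
    img = Image('test/data/source/section_2_sentinel.tif', 'quickbird')
    img.precompute_normalization()
    return img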
Example #3
def create_texton_feature(sat_image: SatelliteImage,
                          window_sizes,
                          image_name,
                          n_clusters=32,
                          cached=True) -> Texton:
    cache_key = "kmeans-texton-{}".format(image_name)

    # Reuse a previously fitted k-means model for this image if one is cached.
    if cached and cached_model_exists(cache_key):
        kmeans = load_cached_model(cache_key)
        print("Loaded cached kmeans {}".format(cache_key))
    else:
        print("Computing k-means model")
        kmeans = texton_cluster([sat_image], n_clusters=n_clusters)
        cache_model(kmeans, cache_key)

    # Build the Texton feature from the fitted clusters and requested windows.
    feature = Texton(kmeans, windows=window_sizes)

    return feature
                base_path=base_path, image_name=image_name)
            test_image_loaded = load_image(test_image)

            cached = True
            # Assemble a FeatureSet holding every requested feature at the
            # requested window scales.
            feature_set = FeatureSet()
            for feature_name, feature_scale in feature_names:
                # Expand each single window size into a square (width, height)
                # tuple.
                feature_scale = tuple((fc, fc) for fc in feature_scale)

                if feature_name == "SIFT":
                    sift_clusters = sift_cluster(map(load_image, train_images))
                    # for fc in feature_scale:
                    sift = Sift(sift_clusters, windows=feature_scale)
                    feature_set.add(sift)
                    cached = False
                if feature_name == "TEXTON":
                    texton_clusters = texton_cluster(
                        map(load_image, train_images))
                    # for fc in feature_scale:
                    texton = Texton(texton_clusters, windows=feature_scale)
                    feature_set.add(texton)
                    cached = False
                if feature_name == "PANTEX":
                    # for fc in feature_scale:
                    pantex = Pantex(windows=feature_scale)
                    feature_set.add(pantex)
                if feature_name == "LACUNARITY":
                    # for fc in feature_scale:
                    lacunarity = Lacunarity(windows=feature_scale)
                    feature_set.add(lacunarity)

            feature_name = "ALL"
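A hedged usage sketch connecting Example #3 to the loop above: create the texton feature for one image and register it in a FeatureSet. The window sizes and image name are illustrative, and sat_image stands for an already loaded image.

# Illustrative values: the window sizes follow the (width, height) tuples
# built in the loop above, and sat_image is an already loaded image.
texton_feature = create_texton_feature(
    sat_image,
    window_sizes=((25, 25), (50, 50)),
    image_name='section_2_sentinel',
    n_clusters=32,
    cached=True,
)

feature_set = FeatureSet()
feature_set.add(texton_feature)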