Code example #1
def predict_regions(tif_file_name,
                    num_classes,
                    tile_width=20000,
                    tile_height=20000,
                    confidence=0.7,
                    intersection_threshold=0.8,
                    mask_pixel_threshold=80):
    logger.info(f"Image path: {tif_file_name}")
    temp_crs_converted_file_name = 'tif_file_with_epsg_3857.tiff'
    tif_file_folder = Folder(tif_file_name)
    working_folder = Folder(cache_folder.get_filepath(tif_file_folder.name))
    masks_folder = working_folder['Masks']
    out_filepath = working_folder[temp_crs_converted_file_name]
    convert_crs(tif_file_name, out_filepath)  # reproject the input raster to EPSG:3857
    show_image_and_tile_shapes(out_filepath, tile_width, tile_height)
    predict_masks(image_path=out_filepath,
                  confidence=confidence,
                  mask_pixel_threshold=mask_pixel_threshold,
                  num_classes=num_classes,
                  tile_width=tile_width,
                  tile_height=tile_height,
                  working_folder=working_folder)
    multipolygon_wkt = get_single_wkt_from_masks(
        masks_folder=masks_folder,
        intersection_threshold=intersection_threshold)
    working_folder.clear()
    logger.info(f"Multipolygon WKT: {multipolygon_wkt}")
    return multipolygon_wkt
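
A minimal usage sketch for the pipeline above; the input path and parameter values are placeholders, not from the source:

# Hypothetical usage -- path and parameters are placeholders.
multipolygon_wkt = predict_regions('fields_scene.tiff',
                                   num_classes=2,
                                   confidence=0.7,
                                   intersection_threshold=0.8)
print(multipolygon_wkt)  # e.g. 'MULTIPOLYGON (((...)))'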
Code example #2
def image_pipeline(img, num_of_features=100, alpha=0.3):
    logger.info(f"Number of features: {num_of_features}")
    logger.info(f"Alpha: {alpha}")
    img_copy = img.copy()
    feature_imgs = []

    for i in range(1, 11):
        # NOTE: as written, the feature crop and its mask are read from the
        # same file; the mask copy is used only for thresholding below.
        temp_img = cv2.imread(feature_imgs_folder['feature_{}.jpg'.format(i)], -1)
        mask_img = cv2.imread(feature_imgs_folder['feature_{}.jpg'.format(i)], -1)
        black_img = np.zeros(mask_img.shape, dtype=np.uint8)
        # match the feature crop's color histogram to the target image
        temp_img = match_histograms(temp_img, img_copy, multichannel=True)
        temp_img = cv2.addWeighted(mask_img, 0.0, temp_img, 1.0, 0.0)  # mask weight is 0, so this is effectively a copy
        # keep feature pixels only where the mask is bright enough
        temp_img = np.where(mask_img > 10, temp_img, black_img)
        feature_imgs.append(temp_img)

    for i in range(num_of_features):
        j = np.random.randint(0, len(feature_imgs))  # sample uniformly over all loaded feature crops
        feature_img = feature_imgs[j]
        img_copy, _, _ = image_to_background(feature_img, img_copy, alpha=alpha)

    return img_copy
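
A usage sketch, assuming the images under feature_imgs_folder are pre-cut feature crops to be blended into a base photo; the paths are placeholders:

import cv2

base = cv2.imread('base_scene.jpg', -1)  # placeholder path
augmented = image_pipeline(base, num_of_features=50, alpha=0.25)
cv2.imwrite('augmented_scene.jpg', augmented)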
Code example #3
def predict_masks(image_path, confidence, working_folder, mask_pixel_threshold,
                  num_classes, tile_width, tile_height):
    segmentation_model = InstanceSegmentationModel(model_checkpoint=model_path,
                                                   num_classes=num_classes,
                                                   device='cuda')
    masks_folder = working_folder['Masks']
    tiles_folder = working_folder['Tiles']
    crop_tif(image_path, tile_width, tile_height, out_folder=tiles_folder)  # split the image into overlapping tiles (see the crop_tif example below)

    for tile_idx, tile_path in enumerate(os.listdir(tiles_folder)):
        if tile_idx % 10 == 0:
            logger.info(f"Tile index: {tile_idx}")

        masks = process_tile(tiles_folder,
                             tile_path,
                             confidence,
                             mask_pixel_threshold,
                             model=segmentation_model.model)
        logger.info(f"{tile_path}: {len(masks)}")

        for mask_idx, mask in enumerate(masks):
            save_geojson_coordinates(mask_idx, mask, masks_folder,
                                     tiles_folder, tile_path, tile_width,
                                     tile_height)
Code example #4
def show_image_and_tile_shapes(image_path, tile_width, tile_height):
    with rasterio.open(image_path) as image:
        logger.info(
            f"[Tile width] X [Tile height]: [{tile_width}] X [{tile_height}]")
        logger.info(
            f"[Image extent width] X [Image extent height]: "
            f"[{image.bounds[2] - image.bounds[0]}] X [{image.bounds[3] - image.bounds[1]}]")
        logger.info(
            f"[Image pixel width] X [Image pixel height]: [{image.width}] X [{image.height}]"
        )
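
For reference, a rasterio dataset's bounds attribute is a BoundingBox(left, bottom, right, top) tuple, so the two differences logged above are the image extent in CRS units (metres for EPSG:3857), while width/height are in pixels. A standalone sketch with a placeholder path:

import rasterio

with rasterio.open('scene.tiff') as src:  # placeholder path
    left, bottom, right, top = src.bounds
    print(right - left, top - bottom)  # extent in CRS units
    print(src.width, src.height)       # extent in pixels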
Code example #5
File: train.py  Project: lgblkb/dm_final_project
import numpy as np
import pandas as pd
import geopandas as gpd
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torchvision
import torchvision.transforms as transforms
from lgblkb_tools import Folder, logger  # assumed import path: Folder and logger come from the same helper library as Plotter
from lgblkb_tools.visualize import Plotter
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
from torch.utils.data import DataLoader, Dataset, random_split

is_cuda_available = torch.cuda.is_available()
logger.info('is_cuda_available: %s', is_cuda_available)
if not is_cuda_available:
    raise SystemError('CUDA is required for training')

this_folder = Folder(__file__)
data_folder = this_folder.parent()['data']

# Device configuration
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')


# Hyper-parameters


def get_loaders(batch_size):
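    # NOTE: the body of get_loaders is truncated in the source. The lines
    # below are a hypothetical completion only, assuming create_data() is the
    # dataset factory used by train() in the next example.
    dataset = create_data()
    train_len = int(0.8 * len(dataset))
    train_set, val_set = random_split(dataset, [train_len, len(dataset) - train_len])
    train_loader = DataLoader(train_set, batch_size=batch_size, shuffle=True, pin_memory=True)
    val_loader = DataLoader(val_set, batch_size=1, shuffle=False, pin_memory=True)
    return train_loader, val_loader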
Code example #6
def train():
    torch.manual_seed(369)

    dataset = create_data()

    train_val_fractions = [0.8, 0.2]
    lengths = [
        int(np.round(len(dataset) * fraction))
        for fraction in train_val_fractions
    ]
    train_dataset, val_dataset = random_split(dataset, lengths)

    train_batch_size = int(len(train_dataset) / 5)
    logger.info("train_batch_size: %s", train_batch_size)
    train_loader = DataLoader(dataset=train_dataset,
                              batch_size=train_batch_size,
                              shuffle=True,
                              pin_memory=True)
    val_loader = DataLoader(dataset=val_dataset,
                            batch_size=1,
                            shuffle=True,
                            pin_memory=True)
    wandb.init(project="bda_project")

    model = TheModel().to(device)
    # model.load_state_dict(torch.load(model_state_savepath))

    wandb.watch(model)

    learning_rate = 1e-3
    loss_fn = nn.MSELoss(reduction='sum')

    optimizer = optim.Adam(model.parameters(), lr=learning_rate)
    scheduler = ReduceLROnPlateau(optimizer, 'min')

    train_step = make_train_step(model, loss_fn, optimizer)

    for epoch in range(200):
        training_losses = list()
        for x_batch_init, y_batch_init in train_loader:
            # for pair in zip(x_batch, y_batch):
            #     Plotter(*pair)

            # raise NotImplementedError
            for batch_idx in range(8):
                seed = np.random.randint(0, 100000000)
                x_batch = augment_batch(x_batch_init, seed)
                y_batch = augment_batch(y_batch_init, seed)
                x_batch = x_batch.to(device)
                y_batch = y_batch.to(device)
                training_loss = train_step(x_batch, y_batch)
                training_losses.append(training_loss)
        train_loss_average = np.mean(training_losses) / train_batch_size
        wandb.log({"Training loss (average)": train_loss_average})

        if epoch % 20 == 0:
            scheduler.step(train_loss_average)
            val_losses = list()
            model.eval()
            with torch.no_grad():
                worst_example = Box()
                for x_val, y_val in val_loader:
                    x_val = x_val.to(device)
                    y_val = y_val.to(device)

                    yhat_val = model(x_val)
                    val_loss = loss_fn(y_val, yhat_val).item()
                    val_losses.append(val_loss)
                    if worst_example.get('val_loss', 0) > val_loss: continue

                    worst_example.x_image = x_val.detach().data.reshape(
                        image_size)
                    worst_example.y_image = y_val.detach().data.reshape(
                        image_size)
                    worst_example.yhat_image = yhat_val.detach().data.reshape(
                        image_size)
                    worst_example.val_loss = val_loss

                images = worst_example.x_image, worst_example.yhat_image, worst_example.y_image
                wandb.log(
                    {f"Epoch {epoch} worst": [wandb.Image(i) for i in images]})
                torch.save(
                    model.state_dict(),
                    os.path.join(wandb.run.dir, f'model_epoch_{epoch}.pt'))
            model.train()
            val_loss_average = np.mean(val_losses)
            wandb.log({"Validation Loss": val_loss_average})
    # torch.save(model.state_dict(), model_state_savepath)

    # plt.plot(losses, label='Training loss')
    # plt.plot(val_losses, label='Validation loss')
    # plt.legend()
    # plt.show()
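
One design choice worth noting: x_batch_init and y_batch_init are augmented with the same random seed, so input and target always receive identical transforms. augment_batch itself is not shown in the source; a minimal sketch of such a seed-synchronized function, purely illustrative:

import numpy as np
import torch

def augment_batch(batch, seed):
    # Hypothetical sketch -- seeding the RNG identically for the x and y
    # batches guarantees matching flips/rotations on inputs and targets.
    rng = np.random.default_rng(seed)
    if rng.random() < 0.5:
        batch = torch.flip(batch, dims=[-1])  # horizontal flip
    k = int(rng.integers(0, 4))
    return torch.rot90(batch, k, dims=[-2, -1])  # rotate by k * 90 degrees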
Code example #7
def crop_tif(tif_file,
             tile_width=20000,
             tile_height=20000,
             tile_stride_factor=2,
             out_folder='Temp'):
    out_folder = Folder(out_folder)
    source_file = rasterio.open(tif_file)
    max_left, max_top = source_file.transform * (0, 0)
    max_right, max_bottom = source_file.transform * (source_file.width,
                                                     source_file.height)
    left, top = max_left, max_top
    tile_count = 0
    horizontal_last = False
    vertical_last = False

    while True:
        tile_path = out_folder[f'{tile_count}.tiff']
        tile_count += 1

        if tile_count % 25 == 0:
            logger.info(f'Tile count: {tile_count}')

        # Skip tiles that already exist on disk, but still fall through to the
        # stepping logic below so the window keeps moving.
        if not os.path.exists(tile_path):
            tile_region = [{
                'type': 'Polygon',
                'coordinates': [[(left, top, 0.0),
                                 (left + tile_width, top, 0.0),
                                 (left + tile_width, top - tile_height, 0.0),
                                 (left, top - tile_height, 0.0)]]
            }]
            out_image, out_transform = rasterio.mask.mask(source_file,
                                                          tile_region,
                                                          crop=True)
            out_meta = source_file.meta
            out_meta.update({
                "driver": "GTiff",
                "height": out_image.shape[1],
                "width": out_image.shape[2],
                "transform": out_transform
            })

            tile = rasterio.open(tile_path, 'w', **out_meta)
            tile.write(out_image)
            tile.close()

        # horizontal stride uses tile_stride_factor, matching the vertical stride
        left += tile_width / tile_stride_factor

        if horizontal_last or left >= max_right:
            left = max_left
            top -= tile_height / tile_stride_factor
            horizontal_last = False
        elif left + tile_width >= max_right:
            left = max_right - tile_width
            horizontal_last = True

        if (vertical_last and horizontal_last) or top <= max_bottom:
            break
        elif top - tile_height <= max_bottom:
            top = max_bottom + tile_height
            vertical_last = True

    source_file.close()
    logger.info(f"Tiles created - {tile_count - 1}")
Code example #8
File: visibility_old.py  Project: Dulalet/geoapp
def main():
    # FieldPoly.synthesize(50, hole_count=1).plot(c='red', alpha=0.3, lw=10) \
    #     .get_visible_poly(ThePoint([500, 500]).plot(c='k', lw=5)).plot()
    # plt.show()
    # return
    # work_folder = data_folder['dauka_tutorial_1']
    work_folder = Folder('/home/daulet/Desktop/zones')
    original_path = work_folder['4-3-1_out.tiff']
    logger.info("original_path: %s", original_path)
    original_ds = DataSet(original_path)

    orig_array = np.where(original_ds.array == -9999, np.nan, original_ds.array)
    orig_array = np.where(np.isnan(orig_array), np.nanmin(orig_array), orig_array)
    filtered_array = gaussian_filter(orig_array, sigma=1)
    otsu_threshold = threshold_otsu(filtered_array)
    mask = filtered_array > otsu_threshold
    eroded_mask = binary_erosion(mask, iterations=5)
    dilated_mask: np.ndarray = binary_dilation(eroded_mask, iterations=5).astype(int)
    # output to tiff
    # DataSet.from_array(dilated_mask, original_ds.geo_info) \
    #     .to_file(str(Path(original_path).with_name('output.tiff')), 'GTiff', no_data_value=0, dtype=gdal.GDT_Byte)
    geoms: gpd.GeoSeries = vectorize(dilated_mask, work_folder['vectorized.geojson'], original_ds)
    geom_extent = shg.Polygon(geoms.unary_union.envelope.boundary)  # envelope of the union of all geometries (cascaded_union is deprecated in Shapely)
    # FieldPoly().bounds_xy
    # shg.Polygon(shg.Polygon().boundary)

    # for geom in geoms:
    #     geom_extent = geom_extent.difference(geom)
    # otirik_env = FieldPoly(geom_extent).plot(c='red')
    # # res = otirik_env.get_visible_poly(ThePoint(otirik_env.geometry.centroid)).plot(c='k').plot()
    # res = otirik_env.get_visible_poly(ThePoint([0, 0])).plot(c='k').plot()
    # # logger.info("res:\n%s", res)
    # # visible_zone.plot()
    # plt.show()

    print('!!!!!!!!!', geoms)

    # for i in range(len(geoms)):
    #     x_arr, y_arr = geoms[i].exterior.coords.xy
    #     holes = [[0] * len(x_arr)] * len(geoms)
    #     for j in range(len(x_arr)):
    #         holes[i][j] = vis.Point(x_arr[j], y_arr[j])
    #         holes_x = holes[i][j].x()

    vis_polygons = []
    x_arr = []
    y_arr = []
    for i in range(len(geoms)):
        points = makePoints(geoms[i])
        vis_polygons.append(vis.Polygon(points))
        x_arr.append(getXPoints(points))
        y_arr.append(getYpoints(points))
        print('Hole in standard form: ', vis_polygons[i].is_in_standard_form())
    points = makePoints(geom_extent)
    walls = vis.Polygon(points)
    # vis_polygons.insert(0, walls)
    env = vis.Environment([walls, vis_polygons[0]])

    print('Walls in standard form : ', walls.is_in_standard_form())
    print('Environment is valid : ', env.is_valid(epsilon))

    observer = vis.Point(673000, 5665000)
    observer.snap_to_boundary_of(env, epsilon)
    print('!!!!', vis_polygons, '!!!!!!')
    observer.snap_to_vertices_of(env, epsilon)

    # isovist = vis.Visibility_Polygon(observer, env, epsilon)

    plotter = Plotter()
    plotter.add_images(mask, eroded_mask, dilated_mask)
    plotter.plot(lbrtwh=(1e-3, 1e-3, 1 - 1e-3, 1 - 1e-3, 1e-3, 0)).show()
    return
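
The vis module used here appears to be the VisiLibity library's Python bindings, where an Environment takes an outer boundary polygon plus hole polygons and is_in_standard_form() / is_valid(epsilon) verify its conventions before a visibility query. The commented-out isovist line would compute the observer's visibility polygon; a sketch of reading it back out, under that assumption:

# Hypothetical continuation, assuming VisiLibity's Python API.
isovist = vis.Visibility_Polygon(observer, env, epsilon)
vertices = [(isovist[i].x(), isovist[i].y()) for i in range(isovist.n())]
print(len(vertices), 'visibility polygon vertices')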
Code example #9
def get_visibility(filepath, second_filepath=None):
    # read the file and build a numpy array from it:
    work_folder = Folder(filepath)
    original_path = work_folder[filepath]
    logger.info("original_path: %s", original_path)
    original_ds = DataSet(original_path)

    # remove extraneous (no-data) pixels:
    orig_array = np.where(original_ds.array == -9999, np.nan,
                          original_ds.array)
    orig_array = np.where(np.isnan(orig_array), np.nanmin(orig_array),
                          orig_array)
    filtered_array = gaussian_filter(orig_array, sigma=1)
    otsu_threshold = threshold_otsu(filtered_array)
    mask = filtered_array > otsu_threshold
    eroded_mask = binary_erosion(mask, iterations=5)
    filtered_mask: np.ndarray = binary_dilation(eroded_mask,
                                                iterations=5).astype(int)
    # geoms: gpd.GeoSeries = vectorize(orig_array>250, work_folder['vectorized.geojson'], original_ds)

    # build vector geometry from the numpy array:
    geoms: gpd.GeoSeries = vectorize(filtered_mask,
                                     work_folder['vectorized.geojson'],
                                     original_ds)

    # ---------------------------- if the computation is run for two observers ------------------------------
    if second_filepath is not None:
        work_folder2 = Folder(second_filepath)
        original_path2 = work_folder2[second_filepath]
        logger.info("original_path: %s", original_path2)
        original_ds2 = DataSet(original_path2)

        orig_array2 = np.where(original_ds2.array == -9999, np.nan,
                               original_ds2.array)
        orig_array2 = np.where(np.isnan(orig_array2), np.nanmin(orig_array2),
                               orig_array2)
        filtered_array2 = gaussian_filter(orig_array2, sigma=1)
        otsu_threshold2 = threshold_otsu(filtered_array2)
        mask2 = filtered_array2 > otsu_threshold2
        eroded_mask2 = binary_erosion(mask2, iterations=5)
        filtered_mask2: np.ndarray = binary_dilation(eroded_mask2,
                                                     iterations=5).astype(int)
        # geoms2: gpd.GeoSeries = vectorize(orig_array2>250, work_folder2['vectorized2.geojson'], original_ds2)
        geoms2: gpd.GeoSeries = vectorize(filtered_mask2,
                                          work_folder2['vectorized2.geojson'],
                                          original_ds2)

        # --------------------------------------------------------------
        path = Path(filepath)
        # shell commands that build a raster file with the mutual visibility zone (GDAL > 3.1.0)
        cmd = f"""cd {path.parent}
            gdal_translate out1.tiff out11.tiff -ot Int32
            gdal_translate out2.tiff out22.tiff -ot Int32
            gdal_merge.py -init "0 0" -o merged.tiff -ot Int32 out1.tiff out2.tiff
            gdal_calc.py -A merged.tiff --outfile=whitemerged.tiff --calc="A * 0" --type=Int32 --overwrite
            gdal_merge.py -init "0 0" -o merged1.tiff -ot Int32 whitemerged.tiff out1.tiff 
            gdal_merge.py -init "0 0" -o merged2.tiff -ot Int32 whitemerged.tiff out2.tiff
            gdal_calc.py -A merged1.tiff -B merged2.tiff --outfile=final.tiff --calc="A + B" --type=Int32 --overwrite
            rm merged.tiff whitemerged.tiff merged1.tiff merged2.tiff out1.tiff out2.tiff out11.tiff out22.tiff"""
        os.system(cmd)
        # load the final file with the mutual visibility zone
        work_folder3 = Folder(str(path.parent) + '/final.tiff')
        original_path3 = work_folder3[str(path.parent) + '/final.tiff']
        logger.info("original_path: %s", original_path3)
        original_ds3 = DataSet(original_path3)
        orig_array3 = np.where(original_ds3.array > 300, original_ds3.array, 0)
        orig_array3 = np.where(np.isnan(orig_array3), np.nanmin(orig_array3),
                               orig_array3)
        filtered_array3 = gaussian_filter(orig_array3, sigma=1)
        otsu_threshold3 = threshold_otsu(filtered_array3)
        mask3 = filtered_array3 > otsu_threshold3
        eroded_mask3 = binary_erosion(mask3, iterations=1)
        filtered_mask3: np.ndarray = binary_dilation(eroded_mask3,
                                                     iterations=1).astype(int)

        # geoms3: gpd.GeoSeries = vectorize(orig_array3>500, work_folder3['vectorized3.geojson'], original_ds3)

        # build vector geometry for the mutual visibility zone
        geoms3: gpd.GeoSeries = vectorize(filtered_mask3,
                                          work_folder3['vectorized3.geojson'],
                                          original_ds3)

        return geoms, geoms2, geoms3
    return geoms
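
A usage sketch; the viewshed rasters are placeholders for files produced upstream (the shell pipeline above expects them to be named out1.tiff and out2.tiff in the same directory):

# Hypothetical usage -- paths are placeholders.
geoms = get_visibility('/data/zones/out1.tiff')             # single observer
g1, g2, g_mutual = get_visibility('/data/zones/out1.tiff',
                                  '/data/zones/out2.tiff')  # two observers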