def predict(config, args):
    """Run batch inference over the validation CSV and write one shapefile per image.

    Builds a CSV -> image-loader pipeline, restores the configured UNet from
    ``args.model_path``, polygonizes each batch of predictions, and writes a
    ``.shp`` file per input image under ``config.job_dir``.

    NOTE(review): a second ``predict`` later in this module shadows this one at
    import time — confirm which definition callers actually get.

    Args:
        config: project config object; provides ``xy_splitter``, ``loader``,
            ``model_name``, ``model`` and ``job_dir``.
        args: CLI args; provides ``data_dir``, ``data_pre``, ``model_path``
            and ``save``.
    """
    # NOTE(review): train_mode=True on the validation file looks suspicious — confirm.
    plug = Wire(filename=os.path.join(args.data_dir, 'val.csv'), train_mode=True)
    loader = (plug
              | CSVLoaderXYZ(name='xyz', prefix=args.data_pre, **config.xy_splitter)
              | ImageLoaderInference('image_loader', True, **config.loader))

    unet = predictors[config.model_name](**config.model)
    predictor = Predictor('unet_predictor', unet, need_setup=True)
    predictor.setup(path=args.model_path, load_weights=True)

    # NOTE(review): evaluator and viewer are constructed but never used below;
    # kept in case construction has side effects — confirm and remove if not.
    evaluator = Evaluator('evaluator', predictor.predictor, need_setup=False)
    viewer = PolygonViewer('viewer', save=args.save, job_dir=config.job_dir)

    polygonizer = Polygonizer('polygons')
    # Hoisted out of the loop: the original rebuilt a ShapefileCreator per batch.
    creator = ShapefileCreator('shapefile')

    generator, steps = loader.generator
    # Bounded by `steps` instead of the original non-terminating `while True`.
    for _ in range(steps):
        # `next(generator)` replaces the Python-2-only `generator.next()`.
        x, y, z = next(generator)
        batch = Wire(x=x, y=y)
        prediction = batch | predictor | polygonizer
        for i in range(len(x)):
            # Derive the output name from the source image name; write under
            # the job directory instead of the original hard-coded personal
            # path ('/Users/nikhilsaraf/Documents').
            shp_name = (os.path.basename(z[i])
                        .replace('image', 'pred')
                        .replace('.jpg', '.shp'))
            creator.transform(
                polygons=prediction.polygons[i],
                filename=os.path.join(config.job_dir, shp_name),
                transform=get_transform(os.path.join(args.data_pre, z[i])))
def predict(config, args):
    """Run the UNet over a single georeferenced image and write a shapefile.

    Opens ``args.file_path``, tiles it into 256x256 patches, predicts each
    tile, reassembles the prediction mosaic, polygonizes it, and writes the
    polygons next to the input as ``<input stem>.shp`` in the input's CRS.

    NOTE(review): this definition shadows an earlier ``predict`` in the same
    module — confirm which one callers actually get.

    Args:
        config: project config object; provides ``model`` kwargs for UNetModel.
        args: CLI args; provides ``file_path`` and ``model_path``.
    """
    image, transform, crs = open_image(args.file_path)
    adjusted_image = adjust_image(image)
    tiles = tile_image(adjusted_image, 256, 256)

    unet = UNetModel(**config.model)
    predictor = Predictor('unet_predictor', unet, need_setup=True)
    predictor.setup(path=args.model_path, load_weights=True)

    predictions = Wire(x=tiles, batch_size=5) | predictor
    remade_image = untile_image(predictions.predictions,
                                adjusted_image.shape[0],
                                adjusted_image.shape[1], 1)
    polygons = Wire(predictions=remade_image) | Polygonizer('polygons')

    # splitext strips only the final extension; the original
    # file_path[:file_path.index('.')] truncated at the FIRST dot and broke
    # on paths containing dots in directory names (e.g. './data/img.jpg').
    shp_filename = os.path.splitext(args.file_path)[0] + '.shp'
    (Wire(filename=shp_filename, transform=make_transform(transform))
     + polygons
     | ShapefileCreator('shapefile', crs=crs.to_dict()))