def test_runall(self):
    """Instantiate, forward and backward every model listed in the model
    config files, skipping known-failing models and reporting any model
    whose parameters receive no gradient."""

    def is_known_to_fail(model_name):
        # Substring blacklist of models whose forward pass is known to fail.
        forward_failing = [
            "MinkUNet_WIP",
            "pointcnn",
            "RSConv_4LD",
            "RSConv_2LD",
            "randlanet",
            "ResUNet32",
            "Res16UNet34",
        ]
        if not HAS_MINKOWSKI:
            # Without the MinkowskiEngine backend these models cannot run at all.
            forward_failing += ["Res16", "MinkUNet", "ResUNetBN2B"]
        for failing in forward_failing:
            # Case-insensitive substring match against the model name.
            if failing.lower() in model_name.lower():
                return True
        return False

    for type_file in self.model_type_files:
        # The task (e.g. segmentation/registration) is the parent directory
        # of the config file; normpath + os.path.sep keeps this portable.
        associated_task = os.path.normpath(type_file).split(os.path.sep)[-2]
        models_config = OmegaConf.load(type_file)
        models_config = OmegaConf.merge(models_config, self.data_config)
        # NOTE: OmegaConf 1.x-style update("key", value) API.
        models_config.update("data.task", associated_task)
        models_config.update("data.grid_size", 0.05)
        for model_name in models_config.models.keys():
            with self.subTest(model_name):
                if not is_known_to_fail(model_name):
                    models_config.update("model_name", model_name)
                    dataset = get_dataset(
                        models_config.models[model_name].conv_type, associated_task)
                    try:
                        model = instantiate_model(models_config, dataset)
                    except Exception as e:
                        # Surface the full config that failed to instantiate.
                        print(e)
                        raise Exception(models_config)
                    model.set_input(dataset[0], device)
                    try:
                        model.forward()
                        model.backward()
                    except Exception as e:
                        print("Forward or backward failing")
                        raise e
                    try:
                        if has_zero_grad(model_name):
                            # Model is expected to have zero-grad params; skip the check.
                            ratio = 1
                        else:
                            ratio = test_hasgrad(model)
                        if ratio < 1:
                            print(
                                "Model %s.%s.%s has %i%% of parameters with 0 gradient"
                                % (associated_task, type_file.split("/")[-1][:-5],
                                   model_name, 100 * ratio))
                    except Exception as e:
                        print("Model with zero gradient %s: %s" % (type_file, model_name))
                        raise e
def test_runall(self):
    """Instantiate, forward and backward every model listed in the model
    config files, skipping known-failing models and reporting any model
    whose parameters receive no gradient."""

    def is_known_to_fail(model_name):
        # Substring blacklist of models whose forward pass is known to fail.
        forward_failing = ["MinkUNet_WIP", "pointcnn", "RSConv_4LD", "RSConv_2LD", "randlanet"]
        for failing in forward_failing:
            if failing.lower() in model_name.lower():
                return True
        return False

    def get_dataset(conv_type, task):
        # Build a mock dataset matching the model's convolution type and task.
        features = 2
        if task == "registration":
            if conv_type.lower() == "dense":
                return PairMockDataset(features, num_points=2048)
            if conv_type.lower() == "sparse":
                tr = Compose(
                    [XYZFeature(True, True, True), GridSampling3D(size=0.01, quantize_coords=True, mode="last")]
                )
                return PairMockDatasetGeometric(features, transform=tr, num_points=1024)
            return PairMockDatasetGeometric(features)
        else:
            if conv_type.lower() == "dense":
                return MockDataset(features, num_points=2048)
            if conv_type.lower() == "sparse":
                return MockDatasetGeometric(
                    features,
                    transform=GridSampling3D(size=0.01, quantize_coords=True, mode="last"),
                    num_points=1024,
                )
            return MockDatasetGeometric(features)

    for type_file in self.model_type_files:
        # FIX: splitting on a literal "/" breaks on Windows paths. Use
        # os.path.normpath + os.path.sep, consistent with the other
        # test_runall implementations in this file. (On POSIX the result
        # is identical.)
        associated_task = os.path.normpath(type_file).split(os.path.sep)[-2]
        models_config = OmegaConf.load(type_file)
        models_config = OmegaConf.merge(models_config, self.data_config)
        # NOTE: OmegaConf 1.x-style update("key", value) API.
        models_config.update("data.task", associated_task)
        for model_name in models_config.models.keys():
            with self.subTest(model_name):
                if not is_known_to_fail(model_name):
                    models_config.update("model_name", model_name)
                    dataset = get_dataset(models_config.models[model_name].conv_type, associated_task)
                    model = instantiate_model(models_config, dataset)
                    model.set_input(dataset[0], device)
                    try:
                        model.forward()
                        model.backward()
                    except Exception as e:
                        print("Forward or backward failing")
                        raise e
                    try:
                        ratio = test_hasgrad(model)
                        if ratio < 1:
                            # FIX: basename via os.path for the same portability reason.
                            print(
                                "Model %s.%s.%s has %i%% of parameters with 0 gradient"
                                % (associated_task, os.path.basename(type_file)[:-5], model_name, 100 * ratio)
                            )
                    except Exception as e:
                        print("Model with zero gradient %s: %s" % (type_file, model_name))
                        raise e
def test_largekpconv(self):
    """Smoke-test KPConvPaper (segmentation): build the model from its
    config, run one forward/backward pass on a mock dataset, and warn if
    some parameters end up with a zero gradient."""
    config = load_model_config("segmentation", "kpconv", "KPConvPaper")
    config.update("data.use_category", True)
    config.update("data.first_subsampling", 0.02)

    mock_data = MockDatasetGeometric(5)
    model = instantiate_model(config, mock_data)
    model.set_input(mock_data[0], device)

    model.forward()
    model.backward()

    grad_ratio = test_hasgrad(model)
    if grad_ratio < 1:
        print("Model segmentation.kpconv.KPConvPaper has %i%% of parameters with 0 gradient" % (100 * grad_ratio))
def test_pointnet2ms(self):
    """Smoke-test pointnet2_largemsg (segmentation): build the model from
    its config, run one forward/backward pass on a mock dataset, and warn
    if some parameters end up with a zero gradient."""
    params = load_model_config("segmentation", "pointnet2", "pointnet2_largemsg")
    params.update("data.use_category", True)
    dataset = MockDataset(5, num_points=2048)
    model = instantiate_model(params, dataset)
    model.set_input(dataset[0], device)
    model.forward()
    model.backward()
    ratio = test_hasgrad(model)
    if ratio < 1:
        # FIX: the message previously said "pointnet2_largemsgs" (extra
        # trailing "s"), which does not match the model actually under test.
        print(
            "Model segmentation.pointnet2.pointnet2_largemsg has %i%% of parameters with 0 gradient" % (100 * ratio)
        )
def test_siamese_minkowski(self):
    """Smoke-test MinkUNet_Fragment (registration): build the siamese model
    from its config, run one forward/backward pass on a paired mock dataset,
    and warn if some parameters end up with a zero gradient."""
    config = load_model_config("registration", "minkowski", "MinkUNet_Fragment")

    # Sparse models need quantized coordinates plus XYZ features.
    pair_transform = Compose(
        [XYZFeature(True, True, True), GridSampling3D(size=0.01, quantize_coords=True, mode="last")]
    )
    mock_pairs = PairMockDatasetGeometric(5, transform=pair_transform, num_points=1024, is_pair_ind=True)

    model = instantiate_model(config, mock_pairs)
    sample = mock_pairs[0]
    model.set_input(sample, device)

    model.forward()
    model.backward()

    grad_ratio = test_hasgrad(model)
    if grad_ratio < 1:
        print(
            "Model registration.minkowski.MinkUNet_Fragment has %i%% of parameters with 0 gradient" % (100 * grad_ratio)
        )
def test_runall(self):
    """Instantiate, forward and backward every model listed in the model
    config files, skipping known-failing models and reporting any model
    whose parameters receive no gradient. Handles both OmegaConf 1.x and
    2.x update APIs."""

    def is_known_to_fail(model_name):
        # Substring blacklist of models whose forward pass is known to fail.
        forward_failing = [
            "path_pretrained",
            "MinkUNet_WIP",
            "pointcnn",
            "RSConv_4LD",
            "RSConv_2LD",
            "randlanet",
            "PVCNN",
            "ResUNet32",
        ]
        if not HAS_MINKOWSKI:
            # Without the MinkowskiEngine backend these models cannot run.
            forward_failing += [
                "Res16",
                "MinkUNet",
                "ResUNetBN2B",
                "ResUNet32",
                "Res16UNet34"
            ]
            # NOTE(review): source formatting was lost; these MS_SVCONV
            # additions are placed inside the HAS_MINKOWSKI guard because the
            # torchsparse-backed models are redirected to the minkowski
            # backend below — confirm against upstream.
            for cm in [2, 4, 6]:
                for h in [1, 2, 3, 4]:
                    for s in ["", "_unshared"]:
                        forward_failing += [
                            "MS_SVCONV_B{}cm_X2_{}head{}".format(cm, h, s)
                        ]
        for failing in forward_failing:
            if failing.lower() in model_name.lower():
                return True
        return False

    def is_torch_sparse_backend(model_name):
        # Models configured with the torchsparse backend; matched by substring.
        torchsparse_backend = [
            "ResUNet32",
            "Res16UNet34",
        ]
        for cm in [2, 4, 6]:
            for h in [1, 2, 3, 4]:
                for s in ["", "_unshared"]:
                    torchsparse_backend += [
                        "MS_SVCONV_B{}cm_X2_{}head{}".format(cm, h, s)
                    ]
        for backend in torchsparse_backend:
            if backend.lower() in model_name.lower():
                return True
        return False

    for type_file in self.model_type_files:
        # Task name is the parent directory of the config file (portable split).
        associated_task = os.path.normpath(type_file).split(
            os.path.sep)[-2]
        # models_config = OmegaConf.load(type_file)
        # Wrap the raw file under a "models" key before merging.
        models_config = OmegaConf.create(
            {"models": OmegaConf.load(type_file)})
        models_config = OmegaConf.merge(models_config, self.data_config)
        # Update to OmegaConf 2.0
        if omegaconf.__version__ == '1.4.1':
            models_config.update("data.task", associated_task)
            models_config.update("data.grid_size", 0.05)
        else:
            OmegaConf.update(models_config, "data.task", associated_task, merge=True)
            OmegaConf.update(models_config, "data.grid_size", 0.05, merge=True)
        models = models_config.get("models")
        # Guard against config files with no "models" section.
        models_keys = models.keys() if models is not None else []
        for model_name in models_keys:
            if model_name == 'defaults':
                # workaround for recursive defaults
                continue
            with self.subTest(model_name):
                if not is_known_to_fail(model_name):
                    if omegaconf.__version__ == '1.4.1':
                        models_config.update("model_name", model_name)
                    else:
                        OmegaConf.update(models_config, "model_name", model_name, merge=True)
                    # modify the backend in minkowski to have the forward
                    if is_torch_sparse_backend(model_name):
                        models_config.models[
                            model_name].backend = "minkowski"
                    dataset = get_dataset(
                        models_config.models[model_name].conv_type, associated_task)
                    try:
                        model = instantiate_model(models_config, dataset)
                    except Exception as e:
                        # Surface the full config that failed to instantiate.
                        print(e)
                        raise Exception(models_config)
                    model.set_input(dataset[0], device)
                    try:
                        model.forward()
                        model.backward()
                    except Exception as e:
                        print("Forward or backward failing")
                        raise e
                    try:
                        if has_zero_grad(model_name):
                            # Model is expected to have zero-grad params; skip the check.
                            ratio = 1
                        else:
                            ratio = test_hasgrad(model)
                        if ratio < 1:
                            print(
                                "Model %s.%s.%s has %i%% of parameters with 0 gradient"
                                % (associated_task, type_file.split("/")[-1][:-5],
                                   model_name, 100 * ratio))
                    except Exception as e:
                        print("Model with zero gradient %s: %s" % (type_file, model_name))
                        raise e