# Dense baseline for comparison against the sparse GSC experiments:
# runs "BaseModel" on the small dense GSC network, sweeping density
# levels meant to be equivalent to the sparse runs' on-percentage.
# NOTE(review): this entry's enclosing `experiments = {` opens earlier
# in the file (not visible in this chunk).
"gsc-smalldense": dict(
    model=ray.tune.grid_search(["BaseModel"]),
    network="small_dense_gsc",
    # `net_params` is presumably defined earlier in the file — confirm.
    net_params=net_params,
    # Grid over effective density ("on" percentage) values.
    equivalent_on_perc=ray.tune.grid_search([
        0.02,
        0.04,
        0.06,
        0.08,
        0.10,
    ]),
    debug_small_dense=True,
),
}

# Build the list of (name, config) pairs to run: merge each experiment's
# overrides into the base config via new_experiment(); if no experiments
# are defined, fall back to the base config under `experiment_name`.
exp_configs = (
    [(name, new_experiment(base_exp_config, c)) for name, c in experiments.items()]
    if experiments
    else [(experiment_name, base_exp_config)]
)

# Register serializers.
ray.init()
# Iterate over the torch tensor classes — presumably to register a custom
# Ray serializer for each; the loop body runs past the end of this chunk,
# so confirm against the full file.
for t in [
    torch.FloatTensor,
    torch.DoubleTensor,
    torch.HalfTensor,
    torch.ByteTensor,
    torch.CharTensor,
    torch.ShortTensor,
    torch.IntTensor,
    torch.LongTensor,
    torch.Tensor,
# Tail of an experiment dict entry whose `dict(` opens earlier in the file
# (not visible in this chunk): dynamic pruning driven entirely by the
# Hebbian criterion (magnitude fraction 0), re-pruning every 50 steps.
prune_methods=["none", "dynamic"],
hebbian_prune_frac=0.99,
magnitude_prune_frac=0.0,
sparsity=0.98,
update_nsteps=50,
# Empty tuple — presumably "prune across no specific dims"; confirm the
# consumer's interpretation of prune_dims.
prune_dims=tuple(),
),
# "static-second-layer-varying-sparsity": dict(
#     model="DSCNN",
#     network="gsc_sparse_dscnn",
#     prune_methods=["none", "static"],
#     sparsity=tune.grid_search([0.98, 0.99, 0.999]),
# ),
}

# Build the list of (name, config) pairs to run: merge each experiment's
# overrides into the base config via new_experiment(); if no experiments
# are defined, fall back to the base config under `experiment_name`.
exp_configs = (
    [(name, new_experiment(base_exp_config, c)) for name, c in experiments.items()]
    if experiments
    else [(experiment_name, base_exp_config)]
)

# Download dataset.
download_dataset(base_exp_config)

# Register serializers.
ray.init()
# Iterate over the torch tensor classes — presumably to register a custom
# Ray serializer for each; both the type list and the loop body continue
# past the end of this chunk, so confirm against the full file.
for t in [
    torch.FloatTensor,
    torch.DoubleTensor,
    torch.HalfTensor,
    torch.ByteTensor,
    torch.CharTensor,