Example 1
        hebbian_prune_perc=None,
        weight_prune_perc=0.3,
        hebbian_grow=False,
        on_perc=ray.tune.grid_search([0.02, 0.04]),
    ),
    "mlp-Sparse": dict(
        model=ray.tune.grid_search(["SparseModel"]),
        # sparse related
        hebbian_prune_perc=None,
        weight_prune_perc=0.3,
        hebbian_grow=False,
        on_perc=ray.tune.grid_search([0.02, 0.04]),
    ),
}
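# Note: each ray.tune.grid_search([...]) entry above expands into its own Ray
# Tune trial when these configs are run, so a single grid_search over
# [0.02, 0.04] produces two trials, and several grid_search keys in one config
# combine as a Cartesian product of values.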
exp_configs = (
    [(name, new_experiment(base_exp_config, c)) for name, c in experiments.items()]
    if experiments
    else [(experiment_name, base_exp_config)]
)
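# new_experiment() comes from the surrounding project and is not shown in this
# excerpt; it appears to merge each experiment's overrides into a copy of
# base_exp_config. A minimal stand-in (an assumption, not the project's actual
# helper) could look like:
#
#   def new_experiment(base_config, overrides):
#       merged = dict(base_config)
#       merged.update(overrides)
#       return merged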

# Register serializers.
ray.init()
for t in [
    torch.FloatTensor,
    torch.DoubleTensor,
    torch.HalfTensor,
    torch.ByteTensor,
    torch.CharTensor,
    torch.ShortTensor,
    torch.IntTensor,
    torch.LongTensor,
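The snippet is truncated at this point; the omitted tail presumably closes the
list of tensor types and registers a custom Ray serializer for each one. Below
is a minimal, self-contained sketch of that pattern, assuming the pre-1.0
ray.register_custom_serializer API and hypothetical tensor_to_numpy /
numpy_to_tensor helpers (newer Ray releases expose ray.util.register_serializer
instead). It is an illustration of the registration idiom, not the example's
actual code.

import ray
import torch

def tensor_to_numpy(tensor):
    # Convert to a NumPy array so Ray can serialize it without pickling issues.
    return tensor.cpu().numpy()

def numpy_to_tensor(array):
    # Rebuild a torch tensor on deserialization.
    return torch.from_numpy(array)

ray.init()
for tensor_type in [torch.FloatTensor, torch.DoubleTensor,
                    torch.IntTensor, torch.LongTensor]:
    ray.register_custom_serializer(
        tensor_type, serializer=tensor_to_numpy, deserializer=numpy_to_tensor
    )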
Example 2
        network="gsc_sparse_dscnn",
        prune_methods=["none", "dynamic"],
        hebbian_prune_frac=0.99,
        magnitude_prune_frac=0.0,
        sparsity=0.98,
        update_nsteps=50,
        prune_dims=tuple(),
    ),
    # "static-second-layer-varying-sparsity": dict(
    #     model="DSCNN",
    #     network="gsc_sparse_dscnn",
    #     prune_methods=["none", "static"],
    #     sparsity=tune.grid_search([0.98, 0.99, 0.999]),
    # ),
}
exp_configs = ([(name, new_experiment(base_exp_config, c))
                for name, c in experiments.items()]
               if experiments else [(experiment_name, base_exp_config)])

# Download dataset.
download_dataset(base_exp_config)

# Register serializers.
ray.init()
for t in [
        torch.FloatTensor,
        torch.DoubleTensor,
        torch.HalfTensor,
        torch.ByteTensor,
        torch.CharTensor,
        torch.ShortTensor,