def in_situ_env(environment):
    print(repr(environment))  # In case of error, prints the type of environment
    parameter_set = ParameterSet()
    parameter_set.add_parameters(x1=range(3), x2=range(3))
    experiment = Experiment(__EXP_NAME__, parameter_set, TestComputation,
                            PickleStorage)
    try:
        error_code = environment.run(experiment, start=2, capacity=5)
        assert_equal(error_code, 0)
    except:
        assert_true(False, "An exception was raised by the environment")
        raise
    storage = experiment.storage
    parameters_ls, result_ls = storage.load_params_and_results()
    assert_equal(len(parameters_ls), 5)  # 5 computations
    assert_equal(len(result_ls), 5)  # 5 computations
    for parameters, result in zip(parameters_ls, result_ls):
        assert_equal(parameters["x1"] * parameters["x2"], result["mult"])
def do_auto_refresh(auto_refresh):
    parameter_set = ParameterSet()
    parameter_set.add_parameters(x1=range(3), x2=range(3))
    experiment = Experiment("{}_1".format(__EXP_NAME__), parameter_set,
                            TestComputation)
    # There should be 9 computations
    assert_equal(len(experiment), 9)
    count = 0
    for i, _ in enumerate(
            experiment.yield_computations(auto_refresh=auto_refresh)):
        if i == 0:
            state = CompletedState(
                Experiment.name_computation(experiment.exp_name, 6))
            PickleStorage(experiment.exp_name).update_state(state)
        count += 1
    print("Auto refresh?", auto_refresh, "--", count)
    assert_equal(count, 8 if auto_refresh else 9)
def test_paramset_separator():
    ps = ParameterSet()
    ps.add_parameters(p1=[1, 2], p2=["a", "b"])
    ps.add_separator(p3="param")
    ps.add_parameters(p1=3)
    assert_equal(len(ps), 6)
    for i, param_dict in ps:
        assert_equal(param_dict["p3"], "param")
        if i < 4:
            assert_in(param_dict["p1"], [1, 2])
        else:
            assert_equal(param_dict["p1"], 3)
    ps.add_parameters(p2="c")
    assert_equal(len(ps), 9)
    count = 0
    for i, param_dict in ps:
        assert_equal(param_dict["p3"], "param")
        if i < 4:
            assert_in(param_dict["p1"], [1, 2])
            assert_in(param_dict["p2"], ["a", "b"])
        if param_dict["p1"] == 3 and param_dict["p2"] == "c":
            count += 1
    assert_equal(count, 1)
    assert_raises(ValueError, ps.add_parameters, p4=10)
def test_paramset_yield():
    ps = ParameterSet()
    assert_equal(len(ps), 1)  # The null dictionary
    ps.add_parameters(p1=1, p2=[2, 3], p3="param")
    ps.add_parameters(p1=4, p2=5)
    cart_prod = [
        {"p1": 1, "p2": 2, "p3": "param"},
        {"p1": 1, "p2": 3, "p3": "param"},
        {"p1": 1, "p2": 5, "p3": "param"},
        {"p1": 4, "p2": 2, "p3": "param"},
        {"p1": 4, "p2": 3, "p3": "param"},
        {"p1": 4, "p2": 5, "p3": "param"},
    ]
    assert_equal(len(ps), 6)
    i = 0
    for _, param_dict in ps:
        assert_in(param_dict, cart_prod)
        i += 1
    assert_equal(i, 6)
    assert_equal(len(ps), 6)
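# A minimal sketch (not part of the original test file; the function name is
# hypothetical) of the Cartesian-product semantics exercised above: repeated
# `add_parameters` calls extend the domain of each named variable, and
# iterating the set yields (index, param_dict) pairs covering every
# combination of those domains.
def sketch_cartesian_product():
    ps = ParameterSet()
    ps.add_parameters(p1=[1, 4], p2=[2, 3, 5], p3="param")
    assert len(ps) == 6  # 2 values of p1 x 3 values of p2 x 1 value of p3
    for index, param_dict in ps:
        print(index, param_dict)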
set_stdout_logging()

# Define the parameter set: the domain each variable can take
environment, namespace = env_parser().parse()
env_params = dict(namespace._get_kwargs())

data_path = "/scratch/users/rmormont/tissuenet"
env = {"image_path": os.path.join(data_path, "patches"),
       "metadata_path": os.path.join(data_path, "metadata"),
       "model_path": os.path.join(data_path, "models"),
       "device": namespace.device,
       "n_jobs": namespace.n_jobs}

os.makedirs(env["image_path"], exist_ok=True)
os.makedirs(env["metadata_path"], exist_ok=True)
os.makedirs(env["model_path"], exist_ok=True)

param_set = ParameterSet()
param_set.add_parameters(epochs=40)
param_set.add_parameters(batch_size=[8])
param_set.add_parameters(zoom_level=[0])
param_set.add_parameters(train_size=0.7)
param_set.add_parameters(random_seed=42)
param_set.add_parameters(learning_rate=[0.001])

# Wrap it together as an experiment
experiment = Experiment("tissuenet-e2e-train-maxzoom", param_set,
                        CliComputationFactory(main, **env))

# Finally run the experiment
environment.run(experiment)
def run(self, result, x, z, w, y=2, n=0, **parameters):
    import time
    from random import randint, normalvariate

    result["multiply"] = x * y
    # Simulate the effect of the new parameter: in this case we add
    # some gaussian noise to the computation of the sum
    result["sum"] = z + w + normalvariate(0, n)
    time.sleep(randint(1, 10))


if __name__ == "__main__":
    set_stdout_logging()

    parser = CTParser()
    environment, _ = parser.parse()

    param_set = ParameterSet()
    param_set.add_parameters(x=[1, 2, 3], z=4, w=[5, 6])

    # Add the separator, the default value for what was computed previously,
    # and the new parameter values
    # -- Separator (compare this to `004_adding_parameter_values.py`)
    param_set.add_separator(n=0)  # Notice we pass the default value
    # -- New parameter values
    param_set.add_parameters(n=[0.01, 0.001])

    experiment = Experiment("BasicUsage", param_set, MyComputation)
    environment.run(experiment)
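# A rough sketch (not taken from the example above; the counts are inferred
# from the behaviour exercised in `test_paramset_separator`) of what the
# separator does to this parameter set: the combinations defined before
# `add_separator(n=0)` keep the default n=0, while the values added afterwards
# only create new combinations.
ps = ParameterSet()
ps.add_parameters(x=[1, 2, 3], z=4, w=[5, 6])  # 3 * 1 * 2 = 6 combinations
ps.add_separator(n=0)                # the 6 existing combinations get n=0
ps.add_parameters(n=[0.01, 0.001])   # 6 * 2 = 12 additional combinations
print(len(ps))                       # expected: 18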
        super().__init__(exp_name, comp_name, context, storage_factory)
        self.my_environment_parameter = my_environment_parameter

    def run(self, result, x, z, w, y=2, **parameters):
        import time
        from random import randint

        # We can access our environment parameter
        print(self.my_environment_parameter)

        result["multiply"] = x * y
        result["sum"] = z + w
        time.sleep(randint(1, 10))


if __name__ == "__main__":
    set_stdout_logging()

    parser = CTParser()
    environment, _ = parser.parse()

    param_set = ParameterSet()
    param_set.add_parameters(x=[1, 2, 3], z=4, w=[5, 6])

    # We use the :meth:`partialize` class method to specialize the
    # computation class with our environment parameter
    my_factory = MyComputation.partialize(my_environment_parameter="Test")

    experiment = Experiment("BasicUsage", param_set, my_factory)
    environment.run(experiment)
def test_prioritized_paramset():
    ps = ParameterSet()
    ps.add_parameters(p1=[1, 2, 3, 4], p2=["a", "b", "c"])
    pps = PrioritizedParamSet(ps)
    pps.prioritize("p2", "b")
    pps.prioritize("p1", 2)
    pps.prioritize("p1", 3)
    pps.prioritize("p2", "c")
    expected = [
        (4, {"p1": 2, "p2": "b"}),   # 12 = 0*2^0 + 0*2^1 + 1*2^2 + 1*2^3
        (7, {"p1": 3, "p2": "b"}),   # 10 = 0*2^0 + 1*2^1 + 0*2^2 + 1*2^3
        (1, {"p1": 1, "p2": "b"}),   #  8 = 0*2^0 + 0*2^1 + 0*2^2 + 1*2^3
        (10, {"p1": 4, "p2": "b"}),  #  8 = 0*2^0 + 0*2^1 + 0*2^2 + 1*2^3
        (5, {"p1": 2, "p2": "c"}),   #  5 = 1*2^0 + 0*2^1 + 1*2^2 + 0*2^3
        (3, {"p1": 2, "p2": "a"}),   #  4 = 0*2^0 + 0*2^1 + 1*2^2 + 0*2^3
        (8, {"p1": 3, "p2": "c"}),   #  3 = 1*2^0 + 1*2^1 + 0*2^2 + 0*2^3
        (6, {"p1": 3, "p2": "a"}),   #  2 = 0*2^0 + 1*2^1 + 0*2^2 + 0*2^3
        (2, {"p1": 1, "p2": "c"}),   #  1 = 1*2^0 + 0*2^1 + 0*2^2 + 0*2^3
        (11, {"p1": 4, "p2": "c"}),  #  1 = 1*2^0 + 0*2^1 + 0*2^2 + 0*2^3
        (0, {"p1": 1, "p2": "a"}),   #  0 = 0*2^0 + 0*2^1 + 0*2^2 + 0*2^3
        (9, {"p1": 4, "p2": "a"}),   #  0 = 0*2^0 + 0*2^1 + 0*2^2 + 0*2^3
    ]
    result = list(pps)
    assert_equal(result, expected)
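# A minimal sketch (not part of the library; `priorities` and `score` are
# hypothetical helpers) showing how the priority scores in the comments above
# can be recomputed: each `prioritize` call appears to contribute a
# power-of-two weight, with earlier calls weighing more, and items are yielded
# by decreasing score (ties broken by the original index).
priorities = [("p2", "b"), ("p1", 2), ("p1", 3), ("p2", "c")]  # call order


def score(param_dict):
    n = len(priorities)
    return sum(2 ** (n - 1 - k)
               for k, (name, value) in enumerate(priorities)
               if param_dict.get(name) == value)


assert score({"p1": 2, "p2": "b"}) == 12
assert score({"p1": 1, "p2": "b"}) == 8
assert score({"p1": 2, "p2": "c"}) == 5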
parser.add_argument("--device", dest="device", default="cuda:0") parser.add_argument("--n_jobs", dest="n_jobs", default=1, type=int) _ = Cytomine._add_cytomine_cli_args(parser.parser) return parser if __name__ == "__main__": set_stdout_logging() # Define the parameter set: the domain each variable can take environment, namespace = env_parser().parse() env_params = dict(namespace._get_kwargs()) os.makedirs(namespace.save_path, exist_ok=True) param_set = ParameterSet() param_set.add_parameters(batch_size=[8]) param_set.add_parameters(epochs=[5]) param_set.add_parameters(overlap=[0]) param_set.add_parameters(tile_size=[512, 256]) param_set.add_parameters(lr=[0.001]) param_set.add_parameters(init_fmaps=[8]) param_set.add_parameters(zoom_level=[0, 1, 2]) def make_build_fn(**kwargs): def build_fn(exp_name, comp_name, context="n/a", storage_factory=PickleStorage): return TrainComputation(exp_name, comp_name, **kwargs,
        import time
        from random import randint

        result["multiply"] = x * y
        result["sum"] = z + w
        time.sleep(randint(1, 10))


if __name__ == "__main__":
    set_stdout_logging()

    parser = CTParser()
    # Add custom argument to parser
    parser.add_argument("y", help="The value of the `y` parameter", type=int)
    environment, namespace = parser.parse()

    param_set = ParameterSet()
    # Add `y` given on the command line to the parameter set
    param_set.add_parameters(x=[1, 2, 3], z=4, w=[5, 6], y=namespace.y)

    # Change the name of the experiment according to the value of `y`.
    # Note that this is mandatory: otherwise, Clustertools cannot distinguish
    # between the two "sub" experiments
    exp_name = "BasicUsage_{}".format(namespace.y)
    print("This is experiment", exp_name)

    experiment = Experiment(exp_name, param_set, MyComputation)
    environment.run(experiment)
        for i in range(100):
            # Bisection step: keep the half of [a, b] in which P changes sign
            m = (a + b) / 2.
            if P(a) * P(m) < 0:
                b = m
            else:
                a = m
            # Notify the progression of the task. `notify_progress` takes as
            # input a float between 0 and 1
            self.notify_progress((i + 1) / 100.)
            # Wait some time to be able to see the update with
            # `clustertools count`
            time.sleep(1)
        result["root"] = m


if __name__ == "__main__":
    set_stdout_logging()

    parser = CTParser()
    environment, _ = parser.parse()

    param_set = ParameterSet()
    param_set.add_parameters(a=1, b=2)

    experiment = Experiment("BasicUsageMonitoring", param_set, MyComputation)
    environment.run(experiment)
data_path = "/scratch/users/rmormont/tissuenet" env = { "image_path": os.path.join(data_path, "patches"), "metadata_path": os.path.join(data_path, "metadata"), "model_path": os.path.join(data_path, "models"), "device": namespace.device, "n_jobs": namespace.n_jobs } os.makedirs(env["image_path"], exist_ok=True) os.makedirs(env["metadata_path"], exist_ok=True) os.makedirs(env["model_path"], exist_ok=True) param_set = ParameterSet() param_set.add_parameters(pretrained=["imagenet", "mtdp"]) param_set.add_parameters(architecture=["densenet121", "resnet34"]) param_set.add_parameters(epochs=60) param_set.add_parameters(batch_size=[24]) param_set.add_parameters(zoom_level=[2]) param_set.add_parameters(train_size=0.8) param_set.add_parameters(random_seed=42) param_set.add_parameters(learning_rate=[0.001]) param_set.add_parameters(aug_elastic_alpha_low=[80]) param_set.add_parameters(aug_elastic_alpha_high=[120]) param_set.add_parameters(aug_elastic_sigma_low=[9.0]) param_set.add_parameters(aug_elastic_sigma_high=[11.0]) param_set.add_parameters(aug_hed_bias_range=[0.0125, 0.025, 0.05, 0.1]) param_set.add_parameters(aug_hed_coef_range=[0.0125, 0.025, 0.05, 0.1]) param_set.add_separator()
                        type=int)
    parser.add_argument("--image_id", dest="image_id", default=-1, type=int)
    _ = Cytomine._add_cytomine_cli_args(parser.parser)
    return parser


if __name__ == "__main__":
    set_stdout_logging()

    # Define the parameter set: the domain each variable can take
    environment, namespace = env_parser().parse()
    env_params = dict(namespace._get_kwargs())
    os.makedirs(namespace.save_path, exist_ok=True)

    param_set = ParameterSet()
    param_set.add_parameters(image_id=[77150767, 77150761, 77150809])
    param_set.add_parameters(batch_size=[8])
    param_set.add_parameters(tile_overlap=[0])
    param_set.add_parameters(tile_size=256)
    param_set.add_parameters(init_fmaps=8)
    param_set.add_parameters(zoom_level=2)
    param_set.add_separator()
    param_set.add_parameters(tile_size=512)
    param_set.add_parameters(zoom_level=0)

    def make_build_fn(**kwargs):
        def build_fn(exp_name, comp_name, context="n/a",
                     storage_factory=PickleStorage):
            return ProcessWSIComputation(exp_name,