def remote_run_experiment(self):
    """Launch one remote ClearML task per stored parameter setup.

    For each setup in ``self._parameter_setups`` a new task is created from
    the configured repo/branch/script, linked to the currently running task
    as its parent, populated with the setup's parameters via ``connect``,
    and enqueued on ``self._queue`` for a clearml-agent to execute.
    """
    for setup in self._parameter_setups:
        print(setup)
        # Everything the remote task needs to reproduce the codebase.
        creation_kwargs = dict(
            project_name=f"{self._project_name}",
            task_name=self.make_task_name(setup),
            repo=self._repo,
            branch=self._branch,
            script=self._script,
            # NOTE(review): relative path — assumes a fixed CWD at creation
            # time; confirm this resolves correctly for all launch contexts.
            requirements_file="../requirements.txt",
        )
        new_task = Task.create(**creation_kwargs)
        # Record lineage back to the launching task, then inject parameters.
        new_task.set_parent(Task.current_task().id)
        new_task.connect(setup)
        Task.enqueue(new_task, self._queue)
def cli():
    """CLI entry point: build a ClearML task from command-line args and enqueue it.

    Parses the arguments installed by ``setup_parser``, handles ``--version``,
    creates (and optionally enqueues) a task via ``CreateAndPopulate``.
    """
    title = 'ClearML launch - launch any codebase on remote machine running clearml-agent'
    print(title)

    parser = ArgumentParser(description=title)
    setup_parser(parser)
    args = parser.parse_args()

    # --version short-circuits everything else.
    if args.version:
        from ...version import __version__
        print(f'Version {__version__}')
        exit(0)

    populator = CreateAndPopulate(
        project_name=args.project,
        task_name=args.name,
        task_type=args.task_type,
        repo=args.repo or args.folder,
        branch=args.branch,
        commit=args.commit,
        script=args.script,
        working_directory=args.cwd,
        packages=args.packages,
        requirements_file=args.requirements,
        docker=args.docker,
        base_task_id=args.base_task_id,
        add_task_init_call=not args.skip_task_init,
        raise_on_missing_entries=True,
        verbose=True,
    )
    # First call validates the raw --args overrides before any task exists.
    populator.update_task_args(args.args)
    print('Creating new task')
    populator.create_task()
    # Second call writes the overrides into the freshly created task.
    populator.update_task_args(args.args)
    print(f'New task created id={populator.get_id()}')

    if not args.queue:
        print('Warning: No queue was provided, leaving task in draft-mode.')
        exit(0)

    Task.enqueue(populator.task, queue_name=args.queue)
    print(f'Task id={populator.get_id()} sent for execution on queue {args.queue}')
    print(f'Execution log at: {populator.task.get_output_log_web_page()}')
# Select base template task.
# Notice we can be more imaginative and use task_id, which would eliminate
# the need to use the project name.
template_task = Task.get_task(
    project_name='examples',
    task_name=params['experiment_template_name'],
)

for i in range(params['total_number_of_experiments']):
    # Clone the template into a write-enabled draft task we can edit.
    clone = Task.clone(
        source_task=template_task,
        name=f'{template_task.name} {i}',
        parent=template_task.id,
    )

    # Start from the template's parameters, then override every tracked key
    # with a freshly drawn sample from the search-space grid.
    clone_parameters = clone.get_parameters()
    for key in space:
        clone_parameters[key] = space[key]()
    clone.set_parameters(clone_parameters)
    print('Experiment {} set with parameters {}'.format(i, clone_parameters))

    # Hand the draft off to an agent via the configured queue.
    Task.enqueue(clone.id, queue_name=params['execution_queue_name'])
    print('Experiment id={} enqueue for execution'.format(clone.id))

# We are done; the next step is to watch the experiments' graphs.
print('Done')
param["param_name_new_value"] = 3 # The queue where we want the template task (clone) to be sent to param["execution_queue_name"] = "default" # Simulate the work of a Task print("Processing....") sleep(2.0) print("Done processing :)") # Get a reference to the task to pipe to. next_task = Task.get_task(project_name=task.get_project_name(), task_name=param["next_task_name"]) # Clone the task to pipe to. This creates a task with status Draft whose parameters can be modified. cloned_task = Task.clone(source_task=next_task, name="Auto generated cloned task") # Get the original parameters of the Task, modify the value of one parameter, # and set the parameters in the next Task cloned_task_parameters = cloned_task.get_parameters() cloned_task_parameters[param["param_name"]] = param["param_name_new_value"] cloned_task.set_parameters(cloned_task_parameters) # Enqueue the Task for execution. The enqueued Task must already exist in the clearml platform print("Enqueue next step in pipeline to queue: {}".format( param["execution_queue_name"])) Task.enqueue(cloned_task.id, queue_name=param["execution_queue_name"]) # We are done. The next step in the pipe line is in charge of the pipeline now. print("Done")
def cli():
    """Full CLI entry point: create a ClearML task from command-line args.

    Parses the arguments installed by ``setup_parser``; handles no-args
    (prints help), ``--version``, optionally loads a docker bash setup
    script from disk, then creates the task via ``CreateAndPopulate`` and
    enqueues it when a queue was given.

    Raises:
        ValueError: when ``--name`` was not provided.
    """
    title = 'ClearML launch - launch any codebase on remote machine running clearml-agent'
    print(title)

    parser = ArgumentParser(description=title)
    setup_parser(parser)
    args = parser.parse_args()

    # With no CLI arguments at all, show usage instead of failing later.
    if len(sys.argv) < 2:
        parser.print_help()
        exit(0)

    if args.version:
        print(f'Version {__version__}')
        exit(0)

    if not args.name:
        raise ValueError("Task name must be provided, use `--name <task-name>`")

    # --docker_bash_setup_script may name a file on disk; if so, read its
    # lines and drop a leading shebang. Otherwise pass the raw value through
    # (normalizing empty to None).
    bash_setup_script = args.docker_bash_setup_script or None
    if args.docker_bash_setup_script and Path(args.docker_bash_setup_script).is_file():
        with open(args.docker_bash_setup_script, "r") as script_file:
            bash_setup_script = script_file.readlines()
        if bash_setup_script and bash_setup_script[0].strip().startswith("#!"):
            bash_setup_script = bash_setup_script[1:]

    populator = CreateAndPopulate(
        project_name=args.project,
        task_name=args.name,
        task_type=args.task_type,
        repo=args.repo or args.folder,
        branch=args.branch,
        commit=args.commit,
        script=args.script,
        working_directory=args.cwd,
        packages=args.packages,
        requirements_file=args.requirements,
        docker=args.docker,
        docker_args=args.docker_args,
        docker_bash_setup_script=bash_setup_script,
        output_uri=args.output_uri,
        base_task_id=args.base_task_id,
        add_task_init_call=not args.skip_task_init,
        raise_on_missing_entries=True,
        verbose=True,
    )
    # First call validates the raw --args overrides before the task exists.
    populator.update_task_args(args.args)
    print('Creating new task')
    populator.create_task()
    # Second call writes the overrides into the freshly created task.
    populator.update_task_args(args.args)
    print(f'New task created id={populator.get_id()}')

    if not args.queue:
        print('Warning: No queue was provided, leaving task in draft-mode.')
        exit(0)

    Task.enqueue(populator.task, queue_name=args.queue)
    print(f'Task id={populator.get_id()} sent for execution on queue {args.queue}')
    print(f'Execution log at: {populator.task.get_output_log_web_page()}')