def create_experiment_for_spec(parameters):
    """Build the mrunner Experiment reproducing the Reptile mini-ImageNet runs.

    Mutates ``parameters`` by stamping in the current git HEAD so the run is
    traceable, then returns a fully configured ``Experiment``.
    """
    # The script name is also displayed in jobs on prometheus.
    script = 'run_miniimagenet.py'
    name = 'tf_reptile_reproduce'
    project_name = "deepsense-ai-research/meta-learning-reptile"
    python_path = '.:specs'
    # e.g. 'plgrid tensor2tensor', do we need it?
    paths_to_dump = ''
    tags = ['mrunner', 'reproduce']
    parameters['git_head'] = get_git_head_info()

    # Each reproduction mode keeps its own checkpoint directory
    # (ckpt_<mode>); all of them are excluded from the code upload.
    modes = [
        'o15', 'o15t', 'o55', 'o55t',
        'o120', 'o120t', 'o520', 'o520t',
        'm15', 'm15t', 'm55', 'm55t',
    ]
    exclude = ['.idea', 'offline_job.log', 'output', 'neptune.log']
    exclude += ['ckpt_' + mode for mode in modes]

    return Experiment(
        project=project_name,
        name=name,
        script=script,
        parameters=parameters,
        python_path=python_path,
        paths_to_dump=paths_to_dump,
        tags=tags,
        exclude=exclude,
        time='2-0:0',  # days-hours:minutes
    )
def test_generate_template(self):
    """Render Dockerfile.jinja2 for a simple experiment and compare the
    generated payload against the expected Dockerfile text."""
    context = Context(storage='/storage')
    experiment = Experiment(
        base_image='python:3',
        paths_to_copy=['.', 'src', 'tests'],
        cmd=DObject(
            command='neptune run foo.py --storage /storage -- --epochs 2',
            env={}))
    # GeneratedTemplateFile renders the jinja2 template to a temp file;
    # presumably Path here is path.py's Path (has .text()) — verify import.
    dockerfile = GeneratedTemplateFile(
        template_filename='Dockerfile.jinja2',
        context=context,
        experiment=experiment,
        requirements_file='requirements.txt')
    dockerfile_payload = Path(dockerfile.path).text(encoding='utf-8')
    # NOTE(review): the line breaks inside this expected literal were lost
    # when the file's whitespace was mangled; reconstructed one directive
    # per line — confirm against the original template output.
    expected_dockerfile_payload = '''FROM python:3
ARG EXP_DIR=/experiment
ARG STORAGE_DIR=/storage
COPY requirements.txt ${EXP_DIR}/requirements.txt
RUN pip install --no-cache-dir -r $EXP_DIR/requirements.txt
COPY . ${EXP_DIR}/.
COPY src ${EXP_DIR}/src
COPY tests ${EXP_DIR}/tests
ENV STORAGE_DIR=${STORAGE_DIR}
VOLUME ${STORAGE_DIR}
VOLUME ${EXP_DIR}
WORKDIR ${EXP_DIR}
ENTRYPOINT ["neptune", "run", "foo.py", "--storage", "/storage", "--"]'''
    self.assertEqual(dockerfile_payload, expected_dockerfile_payload)
def flop_simple_handle_experiment(experiment_name, base_config, project_name,
                                  params_grid, script, python_path,
                                  paths_to_dump, exclude, project_tag,
                                  update_lambda=lambda d1, d2: d1.update(d2),
                                  _script_name=None):
    """Expand ``params_grid`` into one mrunner Experiment per combination.

    Every experiment shares the same tag set: "flop", the project tag, a
    random run tag, and (optionally) the stem of the spec script name.
    ``base_config`` is mutated with the current git HEAD; each experiment
    then receives an independent deep copy of it, merged with one grid
    combination via ``update_lambda``.
    """
    run_tag = get_random_name()
    if _script_name is not None:
        _script_name = pathlib.Path(_script_name).stem
    shared_tags = ["flop", project_tag, run_tag]
    if _script_name:
        shared_tags.append(_script_name)

    # Expand the grid before stamping git_head, preserving call order.
    combinations = get_combinations(params_grid)
    base_config['git_head'] = get_git_head_info()

    def _build(overrides):
        # Deep copy so experiments never share a parameters dict.
        cfg = copy.deepcopy(base_config)
        update_lambda(cfg, overrides)
        return Experiment(project=project_name,
                          name=experiment_name,
                          script=script,
                          parameters=cfg,
                          python_path=python_path,
                          paths_to_dump=paths_to_dump,
                          tags=shared_tags,
                          exclude=exclude)

    return [_build(combination) for combination in combinations]
def create_experiment_for_spec(parameters):
    """Build the ACKTR Atari training Experiment for the sil-montezuma project.

    Mutates ``parameters`` with the current git HEAD, then returns the
    configured ``Experiment``.
    """
    # The script name is also displayed in jobs on prometheus.
    script = 'baselines/acktr/run_atari_training.py'
    name = 'lkryston, acktr sexp'
    project_name = "sil-montezuma"
    python_path = '.:exp_utils:some/other/utils/path'
    # e.g. 'plgrid tensor2tensor', do we need it?
    paths_to_dump = ''
    tags = ['lkryston', 'acktr_sexp', 'expert']
    parameters['git_head'] = get_git_head_info()

    return Experiment(
        project=project_name,
        name=name,
        script=script,
        parameters=parameters,
        python_path=python_path,
        paths_to_dump=paths_to_dump,
        tags=tags,
        time='1-0',  # days-hours
    )
def create_experiment_for_spec(parameters):
    """Build a sandbox template Experiment (placeholder values to be edited).

    Mutates ``parameters`` with the current git HEAD, then returns the
    configured ``Experiment``.
    """
    # The script name is also displayed in jobs on prometheus.
    script = 'some_src/experiment.py'
    name = 'your initials, experiment name'
    project_name = "sandbox"
    python_path = '.:some_utils:some/other/utils/path'
    # e.g. 'plgrid tensor2tensor', do we need it?
    paths_to_dump = ''
    tags = ['test_user', 'other_tag']
    parameters['git_head'] = get_git_head_info()

    return Experiment(
        project=project_name,
        name=name,
        script=script,
        parameters=parameters,
        python_path=python_path,
        paths_to_dump=paths_to_dump,
        tags=tags,
        time='1-0',  # days-hours
    )
def create_experiment_for_spec(parameters):
    """Build the mrunner Experiment reproducing the Reptile Omniglot runs.

    Mutates ``parameters`` with the current git HEAD, then returns the
    configured ``Experiment``.
    """
    # The script name is also displayed in jobs on prometheus.
    script = 'run_omniglot.py'
    name = 'tf_reptile_reproduce'
    project_name = "deepsense-ai-research/meta-learning-reptile"
    python_path = '.:specs'
    # e.g. 'plgrid tensor2tensor', do we need it?
    paths_to_dump = ''
    tags = ['mrunner', 'reproduce']
    parameters['git_head'] = get_git_head_info()

    return Experiment(
        project=project_name,
        name=name,
        script=script,
        parameters=parameters,
        python_path=python_path,
        paths_to_dump=paths_to_dump,
        tags=tags,
        time='2-0:0',  # days-hours:minutes
    )
"""mrunner specification file: a single HER experiment driven by env vars."""
import os
import sys

from munch import Munch
from mrunner.experiment import Experiment

# Both variables are required to reach Neptune; fail fast with a hint.
if "NEPTUNE_API_TOKEN" not in os.environ or "PROJECT_QUALIFIED_NAME" not in os.environ:
    print(
        "Please set NEPTUNE_API_TOKEN and PROJECT_QUALIFIED_NAME env variables"
    )
    print(
        "Their values can be from up.neptune.ml. Click help and then quickstart."
    )
    # sys.exit() is the supported way to abort a script; the builtin exit()
    # is injected by the `site` module and is not guaranteed to exist.
    sys.exit(1)

# Optional space-separated tags, e.g. PROJECT_TAG="her baseline".
# (`in os.environ` — no need for .keys().)
tags = os.environ["PROJECT_TAG"].split(
    ' ') if "PROJECT_TAG" in os.environ else []

exp = Experiment(
    name='HER experiment',
    script='python3 run.py',
    # script='python3 dqn_scratch.py',
    # script='python3 dqn_episodic.py',
    # script='python3 metric_scratch.py',
    project=os.environ["PROJECT_QUALIFIED_NAME"],
    tags=tags,
    env={"NEPTUNE_API_TOKEN": os.environ["NEPTUNE_API_TOKEN"]},
    parameters=Munch(param1=10))

# A specification file must contain list of experiments in experiments_list variable. Here just one.
experiments_list = [exp]
"""mrunner specification file: compositionality sweep, one job per command."""
import os

from mrunner.experiment import Experiment

# Shell commands to launch; append more variants here as needed.
cmds = []
cmds.append(
    # Plain string literal: the original had an f-prefix with no
    # placeholders (ruff F541), which is misleading — dropped it.
    'python compositionality/main.py --batch_size 64 --n_epochs 20000 '
    '--max_len 2 --vocab_size 15 --validation_freq 10 --n_features 10'
)

experiments_list = [
    Experiment(
        project='',
        name='',
        parameters=None,
        script=cmd,
        python_path='.',
        paths_to_dump='',
        # Propagates Neptune credentials into the job environment; raises
        # KeyError up front if NEPTUNE_API_TOKEN is unset (intentional).
        env={"NEPTUNE_API_TOKEN": os.environ["NEPTUNE_API_TOKEN"]},
    ) for cmd in cmds
]