def test_interpolate(config_factory):
    '''Test interpolation of config'''
    myconfig = config_factory(test_cfg)
    # store a value containing an interpolation pattern via the CLI
    cmd = f'''sos config --set me '{{user_name}}@my' -c {myconfig}'''
    rc = subprocess.call(
        cmd,
        stderr=subprocess.DEVNULL,
        stdout=subprocess.DEVNULL,
        shell=True)
    assert rc == 0
    # reload only this file and check that {user_name} was interpolated
    load_config_files(myconfig, default_config_files=False)
    expected = f'{getpass.getuser().lower()}@my'
    assert get_config('me') == expected
def testInterpolate(self):
    '''Test interpolation of config'''
    # write a value containing {user_name} through the sos CLI
    rc = subprocess.call(
        '''sos config --set me '{user_name}@my' -c myconfig.yml''',
        stderr=subprocess.DEVNULL,
        stdout=subprocess.DEVNULL,
        shell=True)
    self.assertEqual(rc, 0)
    # reload and verify that the pattern was expanded to the user name
    load_config_files('myconfig.yml')
    expected = f'{getpass.getuser().lower()}@my'
    self.assertEqual(env.sos_dict['CONFIG']['me'], expected)
def test_get_config(config_factory):
    '''Test get_config: dotted/list key lookup, string interpolation,
    and keyword/dict overrides of interpolation variables.'''
    myconfig = config_factory({
        'val': 5,
        'A': {
            'B.C': '33',
            'B.C1': {
                'D': '34'
            },
            'D': '45'
        },
        'E': {
            'F': {
                'val': 6,
                'val1': 10,
                'G': '{val + val1}'
            },
            'H': '{val}'
        },
        'O': 'A{nonexisting}',
        'X': '{os.environ.get("HOME", "no_home")}'
    })
    load_config_files(myconfig)
    # a nested key can be given as separate args, a dotted string,
    # or a list of keys (a duplicated assertion was removed here)
    assert get_config('A', 'D') == '45'
    assert get_config('A.D') == '45'
    assert get_config(['A', 'D']) == '45'
    # dotted lookup also resolves keys that literally contain dots
    assert get_config('A.B.C') == '33'
    assert get_config('A.B.C1.D') == '34'
    assert get_config('A') == {'B.C': '33', 'B.C1': {'D': '34'}, 'D': '45'}
    # '{val + val1}' is interpolated with sibling values from the dict
    assert get_config('E.F') == {'val': 6, 'val1': 10, 'G': '16'}
    # keyword args override interpolation variables but not stored values
    assert get_config('E.F', val=7) == {'val': 6, 'val1': 10, 'G': '17'}
    assert get_config('E.F', val=7, allowed_keys=['G']) == {'G': '17'}
    assert get_config('E.F', val=7, val1=20) == {
        'val': 6,
        'val1': 10,
        'G': '27'
    }
    # overrides can also be passed as a dictionary
    assert get_config('E.F', {
        'val': 8,
        'val1': 30
    }) == {
        'val': 6,
        'val1': 10,
        'G': '38'
    }
    assert get_config('E.H', val=7) == '7'
    # an undefined interpolation variable raises ValueError ...
    with pytest.raises(ValueError):
        get_config('O')
    # ... unless a value for it is supplied by the caller
    assert get_config('O', nonexisting=7) == 'A7'
    assert get_config('X') == os.environ.get("HOME", "no_home")
def reset_dict(self):
    # Re-initialize the global SoS runtime dictionary for a new run:
    # seed built-in symbols, reload configuration files, and apply
    # command-line arguments as top-level variables.
    env.sos_dict.set('__null_func__', __null_func__)
    env.sos_dict.set('SOS_VERSION', __version__)
    env.sos_dict.set('__args__', self.args)
    if self.md5:
        # workflow signature file, keyed by the script's md5
        env.sos_dict.set(
            '__workflow_sig__',
            os.path.join(env.exec_dir, '.sos', f'{self.md5}.sig'))
    # names that user code may not redefine: builtins, SoS symbols,
    # and Python keywords ...
    self._base_symbols = set(dir(__builtins__)) | set(
        env.sos_dict['sos_symbols_']) | set(keyword.kwlist)
    # ... except these two, which users are allowed to shadow
    self._base_symbols -= {'dynamic', 'sos_run'}
    # load configuration files
    cfg = load_config_files(self.config['config_file'])
    # if check_readonly is set to True, allow checking readonly vars
    if cfg.get('sos', {}).get('change_all_cap_vars', None) is not None:
        if cfg['sos']['change_all_cap_vars'] not in ('warning', 'error'):
            env.logger.error(
                f'Configuration sos.change_all_cap_vars can only be warning or error: {cfg["sos"]["change_all_cap_vars"]} provided'
            )
        else:
            env.sos_dict._change_all_cap_vars = cfg['sos'][
                'change_all_cap_vars']
    # set config to CONFIG
    env.sos_dict.set('CONFIG', cfg)
    # NOTE(review): removes a hardcoded 'config.yml' target; presumably
    # a leftover of a test fixture -- confirm it is still needed
    file_target('config.yml').remove('both')
    # remove some variables because they would interfere with step analysis
    for key in ('_input', 'step_input'):
        env.sos_dict.pop(key, None)
    env.sos_dict.quick_update(self.shared)
    if isinstance(self.args, dict):
        # command-line variables become workflow variables, except
        # internal double-underscore-prefixed ones
        for key, value in self.args.items():
            if not key.startswith('__'):
                env.sos_dict.set(key, value)
def testConfigSet(self):
    '''Test interpolation of config'''
    # (command, key, expected value) triples: scalar, list, string,
    # string list, and a python dictionary literal
    cases = [
        ('sos config --set cut 0.5 -c myconfig.yml', 'cut', 0.5),
        ('sos config --set cut1 0.5 2 3 -c myconfig.yml', 'cut1',
         [0.5, 2, 3]),
        ('sos config --set cut2 a3 -c myconfig.yml', 'cut2', 'a3'),
        ('sos config --set cut3 a b c -c myconfig.yml', 'cut3',
         ['a', 'b', 'c']),
        ('''sos config --set cut4 "{'A': 123}" -c myconfig.yml''', 'cut4', {
            'A': 123
        }),
    ]
    for cmd, key, expected in cases:
        self.assertEqual(
            subprocess.call(
                cmd,
                stderr=subprocess.DEVNULL,
                stdout=subprocess.DEVNULL,
                shell=True), 0)
        load_config_files('myconfig.yml')
        self.assertEqual(env.sos_dict['CONFIG'][key], expected)
def test_config_set(config_factory):
    '''Test interpolation of config'''
    myconfig = config_factory(test_cfg)
    # (command, key, expected value) triples: scalar, list, string,
    # string list, and a python dictionary literal
    cases = [
        (f'sos config --set cut 0.5 -c {myconfig}', 'cut', 0.5),
        (f'sos config --set cut1 0.5 2 3 -c {myconfig}', 'cut1',
         [0.5, 2, 3]),
        (f'sos config --set cut2 a3 -c {myconfig}', 'cut2', 'a3'),
        (f'sos config --set cut3 a b c -c {myconfig}', 'cut3',
         ['a', 'b', 'c']),
        (f'''sos config --set cut4 "{{'A': 123}}" -c {myconfig}''', 'cut4', {
            'A': 123
        }),
    ]
    for cmd, key, expected in cases:
        assert subprocess.call(
            cmd,
            stderr=subprocess.DEVNULL,
            stdout=subprocess.DEVNULL,
            shell=True) == 0
        load_config_files(myconfig)
        assert env.sos_dict['CONFIG'][key] == expected
def runfile(script=None, raw_args='', wdir='.', code=None, kernel=None,
            **kwargs):
    """Parse arguments and execute a SoS script or notebook cell.

    Parameters:
        script:   path to a SoS script, or None to execute `code`
        raw_args: command-line arguments as a string or a list
        wdir:     working directory (kept for interface compatibility)
        code:     content of the notebook cell when `script` is None
        kernel:   the SoS Jupyter kernel, or None outside a notebook

    Returns the value of '__last_res__' from the executor, or None when
    help is printed, the cell is empty, or the workflow ran remotely.
    """
    # This has something to do with the prefix matching rule of
    # parse_known_args: "--rep 3" would be parsed as args.workflow=3
    # with unknown "--rep", instead of args.workflow=None with unknown
    # "--rep 3".  We therefore disable the workflow positional argument
    # when the first argument is an option.
    raw_args = shlex.split(raw_args) if isinstance(raw_args, str) else raw_args
    if (script is None and code is None) or '-h' in raw_args:
        parser = get_run_parser(interactive=True, with_workflow=True)
        parser.print_help()
        return
    if raw_args and raw_args[0].lstrip().startswith('-'):
        parser = get_run_parser(interactive=True, with_workflow=False)
        parser.error = _parse_error
        args, workflow_args = parser.parse_known_args(raw_args)
        args.workflow = None
    else:
        parser = get_run_parser(interactive=True, with_workflow=True)
        parser.error = _parse_error
        args, workflow_args = parser.parse_known_args(raw_args)
    # for reporting purpose
    sys.argv = ['%run'] + raw_args
    env.verbosity = args.verbosity
    if kernel and not isinstance(env.logger.handlers[0],
                                 NotebookLoggingHandler):
        # replace the default handler with one that reports to the kernel
        env.logger.handlers = []
        levels = {
            0: logging.ERROR,
            1: logging.WARNING,
            2: logging.INFO,
            3: logging.DEBUG,
            4: logging.TRACE,
            None: logging.INFO
        }
        env.logger.addHandler(
            NotebookLoggingHandler(
                levels[env.verbosity], kernel, title=' '.join(sys.argv)))
    else:
        # BUG FIX: was `env.logger.handers` (typo) which raised
        # AttributeError whenever this branch executed.
        env.logger.handlers[0].setTitle(' '.join(sys.argv))
    dt = datetime.datetime.now().strftime('%m%d%y_%H%M')
    # an unset option means "use a timestamped default file name";
    # an explicit empty string disables the output entirely
    if args.__dag__ is None:
        args.__dag__ = f'workflow_{dt}.dot'
    elif args.__dag__ == '':
        args.__dag__ = None
    if args.__report__ is None:
        args.__report__ = f'workflow_{dt}.html'
    elif args.__report__ == '':
        args.__report__ = None
    if args.__remote__:
        from sos.utils import load_config_files
        cfg = load_config_files(args.__config__)
        env.sos_dict.set('CONFIG', cfg)
        # if executing on a remote host: save the cell to a script file,
        # copy it over, and run "sos run" there
        from sos.hosts import Host
        host = Host(args.__remote__)
        if script is None:
            if not code.strip():
                return
            script = os.path.join('.sos', '__interactive__.sos')
            with open(script, 'w') as s:
                s.write(code)
        # copy script to remote host...
        host.send_to_host(script)
        from sos.utils import remove_arg
        # strip -r/-c so the remote invocation does not recurse or
        # pick up a local-only configuration file
        argv = shlex.split(raw_args) if isinstance(raw_args, str) else raw_args
        argv = remove_arg(argv, '-r')
        argv = remove_arg(argv, '-c')
        # execute the command on remote host
        try:
            with kernel.redirect_sos_io():
                ret = host._host_agent.run_command(
                    ['sos', 'run', script] + argv,
                    wait_for_task=True,
                    realtime=True)
            if ret:
                kernel.send_response(
                    kernel.iopub_socket, 'stream',
                    dict(
                        name='stderr',
                        text=
                        f'remote execution of workflow exited with code {ret}'
                    ))
        except Exception as e:
            # best-effort: report the failure to the notebook and return
            if kernel:
                kernel.send_response(kernel.iopub_socket, 'stream', {
                    'name': 'stdout',
                    'text': str(e)
                })
        return
    if args.__bin_dirs__:
        for d in args.__bin_dirs__:
            if d == '~/.sos/bin' and not os.path.isdir(os.path.expanduser(d)):
                os.makedirs(os.path.expanduser(d), exist_ok=True)
        os.environ['PATH'] = os.pathsep.join(
            [os.path.expanduser(x) for x in args.__bin_dirs__
            ]) + os.pathsep + os.environ['PATH']
    # clear __step_input__, __step_output__ etc because there is
    # no concept of passing input/outputs across cells.
    env.sos_dict.set('__step_output__', sos_targets([]))
    for k in ('__step_input__', '__default_output__', 'step_input',
              'step_output', 'step_depends', '_input', '_output', '_depends'):
        env.sos_dict.pop(k, None)
    try:
        if script is None:
            if not code.strip():
                return
            if kernel is None:
                script = SoS_Script(content=code)
            elif kernel._workflow_mode:
                # in workflow mode, the content is sent by magics %run and %sosrun
                script = SoS_Script(content=code)
            else:
                # this is a scratch step: if there is no section header,
                # add one so that the cell appears to be a SoS script
                # with one section
                if not any(
                        SOS_SECTION_HEADER.match(line) or
                        line.startswith('%from') or
                        line.startswith('%include')
                        for line in code.splitlines()):
                    code = '[scratch_0]\n' + code
                    script = SoS_Script(content=code)
                else:
                    # a workflow cell can only be executed with the
                    # magics %run or %sosrun; silently ignore it here
                    return
        else:
            script = SoS_Script(filename=script)
        workflow = script.workflow(
            args.workflow, use_default=not args.__targets__)
        env.config: DefaultDict[str, Union[None, bool, str]] = defaultdict(str)
        executor = Interactive_Executor(
            workflow,
            args=workflow_args,
            config={
                'config_file': args.__config__,
                'output_dag': args.__dag__,
                'output_report': args.__report__,
                'sig_mode': 'ignore' if args.dryrun else args.__sig_mode__,
                'default_queue': ''
                                 if args.__queue__ is None else args.__queue__,
                # wait if -w or in dryrun mode, not wait if -W,
                # otherwise use queue default
                'wait_for_task': True
                                 if args.__wait__ is True or args.dryrun else
                                 (False if args.__no_wait__ else None),
                'resume_mode': kernel is not None and kernel._resume_execution,
                'run_mode': 'dryrun' if args.dryrun else 'interactive',
                'verbosity': args.verbosity,
                'max_procs': args.__max_procs__,
                'max_running_jobs': args.__max_running_jobs__,
                # for information and resume only
                'workdir': os.getcwd(),
                'script': "interactive",
                'workflow': args.workflow,
                'targets': args.__targets__,
                'bin_dirs': args.__bin_dirs__,
                'workflow_args': workflow_args
            })
        return executor.run(args.__targets__)['__last_res__']
    except PendingTasks:
        raise
    except SystemExit:
        # this happens because the executor is in resume mode but nothing
        # needs to be resumed, we simply pass
        return
    except Exception:
        if args.verbosity and args.verbosity > 2:
            sys.stderr.write(get_traceback())
        raise
    finally:
        env.config['sig_mode'] = 'ignore'
        env.verbosity = 2
def runfile(script=None, raw_args='', wdir='.', code=None, kernel=None,
            **kwargs):
    """Parse arguments and execute a SoS script or notebook cell,
    either locally (single process) or on a remote host (-r).

    Returns the executor result, or None when help/queue listing is
    printed, the cell is empty, or the workflow ran remotely.
    """
    # this has something to do with Prefix matching rule of parse_known_args
    #
    # That is to say
    #
    # --rep 3
    #
    # would be parsed as
    #
    # args.workflow=3, unknown --rep
    #
    # instead of
    #
    # args.workflow=None, unknown --rep 3
    #
    # we then have to change the parse to disable args.workflow when
    # there is no workflow option.
    args = shlex.split(raw_args) if isinstance(raw_args, str) else raw_args
    if (script is None and code is None) or '-h' in args:
        parser = get_run_parser(interactive=True, with_workflow=True)
        parser.print_help()
        return
    if args and args[0].lstrip().startswith('-'):
        parser = get_run_parser(interactive=True, with_workflow=False)
        parser.error = _parse_error
        args, workflow_args = parser.parse_known_args(args)
        args.workflow = None
    else:
        parser = get_run_parser(interactive=True, with_workflow=True)
        parser.error = _parse_error
        args, workflow_args = parser.parse_known_args(args)
    # no multi-processing in interactive mode
    env.max_jobs = 1
    env.verbosity = args.verbosity
    # "-q" with an empty value means "list available queues and stop"
    if args.__queue__ == '':
        from sos.hosts import list_queues
        list_queues(args.__config__, args.verbosity)
        return
    if args.__remote__:
        from sos.utils import load_config_files
        cfg = load_config_files(args.__config__)
        env.sos_dict.set('CONFIG', cfg)
        # NOTE(review): this branch is unreachable -- '' is falsy, so the
        # enclosing `if args.__remote__:` already excludes it; the outer
        # test was presumably meant to be `is not None`.  Confirm intent.
        if args.__remote__ == '':
            from .hosts import list_queues
            list_queues(cfg, args.verbosity)
            return
        # if executing on a remote host...
        from sos.hosts import Host
        host = Host(args.__remote__)
        #
        if script is None:
            if not code.strip():
                return
            # save the cell content to a temporary script file
            script = os.path.join('.sos', '__interactive__.sos')
            with open(script, 'w') as s:
                s.write(code)
        # copy script to remote host...
        host.send_to_host(script)
        from sos.utils import remove_arg
        # strip -r/-c so the remote invocation does not recurse or use
        # a local-only configuration file
        argv = shlex.split(raw_args) if isinstance(raw_args, str) else raw_args
        argv = remove_arg(argv, '-r')
        argv = remove_arg(argv, '-c')
        # execute the command on remote host
        try:
            with kernel.redirect_sos_io():
                ret = host._host_agent.run_command(
                    ['sos', 'run', script] + argv,
                    wait_for_task=True,
                    realtime=True)
            if ret:
                kernel.send_response(
                    kernel.iopub_socket, 'stream',
                    dict(
                        name='stderr',
                        text=
                        f'remote execution of workflow exited with code {ret}'
                    ))
        except Exception as e:
            # best-effort: report the failure to the notebook and return
            if kernel:
                kernel.send_response(kernel.iopub_socket, 'stream', {
                    'name': 'stdout',
                    'text': str(e)
                })
        return
    if args.__bin_dirs__:
        import fasteners
        for d in args.__bin_dirs__:
            if d == '~/.sos/bin' and not os.path.isdir(os.path.expanduser(d)):
                # serialize directory creation across concurrent processes
                with fasteners.InterProcessLock(
                        os.path.join(tempfile.gettempdir(), 'sos_lock_bin')):
                    os.makedirs(os.path.expanduser(d))
            elif not os.path.isdir(os.path.expanduser(d)):
                raise ValueError(f'directory does not exist: {d}')
        os.environ['PATH'] = os.pathsep.join(
            [os.path.expanduser(x) for x in args.__bin_dirs__
            ]) + os.pathsep + os.environ['PATH']
    # clear __step_input__, __step_output__ etc because there is
    # no concept of passing input/outputs across cells.
    env.sos_dict.set('__step_output__', [])
    for k in ['__step_input__', '__default_output__', 'input', 'output',
              'depends', '_input', '_output', '_depends']:
        env.sos_dict.pop(k, None)
    try:
        if script is None:
            if not code.strip():
                return
            if kernel is None:
                script = SoS_Script(content=code)
            else:
                if kernel._workflow_mode:
                    # in workflow mode, the content is sent by magics %run and %sosrun
                    script = SoS_Script(content=code)
                else:
                    # this is a scratch step...
                    # if there is no section header, add a header so that the block
                    # appears to be a SoS script with one section
                    if not any([
                            SOS_SECTION_HEADER.match(line) or
                            line.startswith('%from') or
                            line.startswith('%include')
                            for line in code.splitlines()
                    ]):
                        code = '[scratch_0]\n' + code
                        script = SoS_Script(content=code)
                    else:
                        if kernel.cell_idx == -1:
                            kernel.send_frontend_msg(
                                'stream', {
                                    'name':
                                        'stdout',
                                    'text':
                                        'Workflow can only be executed with magic %run or %sosrun.'
                                })
                        return
        else:
            script = SoS_Script(filename=script)
        workflow = script.workflow(args.workflow)
        executor = Interactive_Executor(
            workflow,
            args=workflow_args,
            config={
                'config_file': args.__config__,
                'output_dag': args.__dag__,
                'sig_mode': args.__sig_mode__,
                'default_queue': ''
                                 if args.__queue__ is None else args.__queue__,
                # wait if -w or in dryrun mode, not wait if -W, otherwise use queue default
                'wait_for_task': True
                                 if args.__wait__ is True or args.dryrun else
                                 (False if args.__no_wait__ else None),
                'resume_mode': kernel is not None and kernel._resume_execution,
                'run_mode': 'dryrun' if args.dryrun else 'interactive',
                'verbosity': args.verbosity,
                'max_procs': 1,
                'max_running_jobs': args.__max_running_jobs__,
                # for infomration and resume only
                'workdir': os.getcwd(),
                'script': "interactive",
                'workflow': args.workflow,
                'targets': args.__targets__,
                'bin_dirs': args.__bin_dirs__,
                'workflow_args': workflow_args
            })
        return executor.run(args.__targets__)
    except PendingTasks:
        raise
    except SystemExit:
        # this happens because the executor is in resume mode but nothing
        # needs to be resumed, we simply pass
        return
    except Exception:
        if args.verbosity and args.verbosity > 2:
            sys.stderr.write(get_traceback())
        raise
    finally:
        env.config['sig_mode'] = 'ignore'
        env.verbosity = 2