def fixt_chmodx(request, tmp_path):
    """Build chmod-x test cases: a target path plus the expected outcome."""
    param = request.param
    if param == 'not_a_file':
        # a directory can never be chmod-x'ed as an executable file
        target = tmp_path / 'chmodxtest_not_a_file'
        target.mkdir()
        return Diot(file=target, expt=OSError)
    if param == 'success':
        target = tmp_path / 'chmodxtest_success'
        target.write_text('')
        return Diot(file=target, expt=[str(target)])
    if getuid() == 0:
        # the remaining cases rely on permission failures, impossible as root
        pytest.skip('I am root, I cannot fail chmod and read from shebang')
    if param == 'failed_to_chmod':
        # not writable by a non-root user
        return Diot(file='/etc/passwd', expt=OSError)
    if param == 'from_shebang':
        if not path.isfile('/bin/zcat'):
            pytest.skip('/bin/zcat not exists.')
        return Diot(file='/bin/zcat', expt=['/bin/sh', '/bin/zcat'])
    if param == 'unierr_shebang':
        if not path.isfile('/bin/bash'):
            pytest.skip('/bin/bash not exists.')
        # binary content is expected to surface as an OSError
        return Diot(file='/bin/bash', expt=OSError)  # UnicodeDecodeError
def test_basic_box():
    """Diot built from kwargs, dict, zip and pair lists all compare equal."""
    from_kwargs = Diot(one=1, two=2, three=3)
    from_dict = Diot({'one': 1, 'two': 2, 'three': 3})
    from_zip = Diot(zip(['one', 'two', 'three'], [1, 2, 3]))
    from_pairs = Diot([('two', 2), ('one', 1), ('three', 3)])
    from_unordered = Diot({'three': 3, 'one': 1, 'two': 2})
    assert from_kwargs == from_dict == from_zip == from_pairs == from_unordered
def test_prepinput_exc(job0, tmpdir):
    """Each malformed input specification must raise JobInputParseError."""
    missing = tmpdir / 'test_prepinput_not_exists.txt'

    # [infile:file] value is not a string or a path
    job0.proc.input = Diot(infile=('file', [[]]))
    with pytest.raises(JobInputParseError):
        job0._prepInput()

    # [file] input does not exist on disk
    job0.proc.input = Diot(nefile=('file', [missing]))
    with pytest.raises(JobInputParseError):
        job0._prepInput()

    # [files] value is not a list
    job0.proc.input = Diot(nlfiles=('files', [1]))
    with pytest.raises(JobInputParseError):
        job0._prepInput()

    # [files] element is not a path
    job0.proc.input = Diot(npfiles=('files', [[None]]))
    with pytest.raises(JobInputParseError):
        job0._prepInput()

    # [files] element does not exist on disk
    job0.proc.input = Diot(nefiles=('files', [[missing]]))
    with pytest.raises(JobInputParseError):
        job0._prepInput()
def on_filetree_req(self, data):
    """Request the content of a folder or a file item"""
    # data: proc, job, type, path, eleid, rootid
    # resp: proc, job, type, eleid, path, rootid, content
    data = Diot(data)
    resp = data.copy()
    workdir = pipeline_data.procs[data['proc']].props.workdir
    jobdir = Path(workdir) / str(data.job)
    target = jobdir.joinpath(data.path)
    resp.name = target.name
    if data.type == 'folder':
        # list the direct children, each tagged with its own type
        resp.content = []
        for entry in target.iterdir():
            child_path = (f"{data.path}/{entry.name}"
                          if data.path
                          else entry.name)
            child_type = 'folder' if entry.is_dir() else _filetype(entry)
            resp.content.append(
                Diot(name=entry.name, path=child_path, type=child_type))
    elif data.type == 'image':
        with open(target, 'rb') as fimg:
            resp.content = fimg.read()
    elif target.stat().st_size > 1024 * 1024:
        # too large to ship as text; signal the client with type=False
        resp.type = False
    else:
        with open(target, 'rt') as ftext:
            resp.content = ftext.read()
    emit('filetree_resp', resp)
def _cmdy_fix_popen_config(popen_config: Diot):
    """Fix when env wrongly passed as envs.

    Send the whole `os.environ` instead of a piece of it given by
    popen_config.env. Also raise warnings if configs should not go with
    popen but rather cmdy.

    @params:
        popen_config (Diot): The keyword arguments destined for Popen;
            modified in place.
    """
    if 'envs' in popen_config:
        if 'env' in popen_config:
            # 'env' takes precedence; drop the misspelled key.
            # (fixed: implicit concat used to produce "envs willbe ignored")
            warnings.warn(
                "Both envs and env specified in popen args, envs will "
                "be ignored")
            del popen_config['envs']
        else:
            popen_config['env'] = popen_config.pop('envs')

    if 'env' in popen_config:
        # Stringify every value (bools become '1'/'0') and overlay the
        # result on a full copy of os.environ.
        normalized_env = {}
        for key, value in popen_config.env.items():
            if isinstance(value, bool):
                value = int(value)
            normalized_env[key] = str(value)
        envs = environ.copy()
        envs.update(normalized_env)
        popen_config.env = envs

    for pipe in ('stdin', 'stdout', 'stderr'):
        if pipe in popen_config:
            # fixed typo: "Motifying" -> "Modifying"
            warnings.warn("Modifying pipes are not allowed. "
                          "Values will be ignored")
            del popen_config[pipe]
def test_to_yaml():
    """to_yaml output round-trips through yaml.SafeLoader."""
    import yaml
    box = Diot(**test_dict)
    expected = {key: val
                for key, val in test_dict.items()
                if key != 'diot_nest'}
    assert yaml.load(box.to_yaml(), Loader=yaml.SafeLoader) == expected
def test_to_toml():
    """to_toml output round-trips through rtoml.loads."""
    import rtoml
    box = Diot(**test_dict)
    expected = {key: val
                for key, val in test_dict.items()
                if key != 'diot_nest'}
    assert rtoml.loads(box.to_toml()) == expected
def on_run_request(self, data):
    """Running a command/script"""
    logger.debug('Got request run_request from client '
                 f'{request.remote_addr}')
    # eleid: logger.id,
    # proc: that.proc,
    # job: that.job,
    # target: type,
    # cmd: $(this).val()});
    data = Diot(data)
    running = pipeline_data.setdefault('running', Diot())
    if data.eleid in running:
        # is already running
        return
    cmd = data.cmd
    if 'target' in data:
        # append the (quoted) job-relative target path to the command
        workdir = pipeline_data.procs[data['proc']].props.workdir
        target = Path(workdir) / str(int(data.job)) / str(data.target)
        target = repr(str(target))
        cmd = cmd + ' ' + target if cmd else target
    running[data.eleid] = {
        'cmdy': cmdy.bash(c=cmd, _raise=False).iter,
        'buffer': '',
    }
def fixt_fileflush(request, fd_fileflush):
    """File-flush test cases: descriptor, residue and expected results."""
    fd_read, fd_append = fd_fileflush
    if request.param == 0:
        # nothing written yet
        return Diot(filed=fd_read, residue='',
                    expt_lines=[], expt_residue='')
    if request.param == 1:
        # a partial line (no newline) stays in the residue
        fd_append.write('abcde')
        fd_append.flush()
        return Diot(filed=fd_read, residue='',
                    expt_lines=[], expt_residue='abcde')
    if request.param == 2:
        # residue + new data yields one full line plus a new residue
        fd_append.write('ccc\ne1')
        fd_append.flush()
        return Diot(filed=fd_read, residue='abcde',
                    expt_lines=['abcdeccc\n'], expt_residue='e1')
    if request.param == 3:
        # end=True forces the trailing partial line out
        fd_append.write('ccc')
        fd_append.flush()
        return Diot(filed=fd_read, residue='', end=True,
                    expt_lines=['ccc\n'], expt_residue='')
    if request.param == 4:
        # end=True flushes the leftover residue even with no new data
        return Diot(filed=fd_read, residue='end', end=True,
                    expt_lines=['end\n'], expt_residue='')
def test_trydeepcopy():
    """A best-effort deepcopy helper copies nested Diot structures."""
    def try_deep_copy(obj, _recursive=True):
        """Try to deepcopy an object; fall back to a shallow copy on failure.

        @params:
            obj (any): The object
            _recursive (bool): A flag to avoid deep recursion
        @returns:
            The copied object
        """
        if _recursive and isinstance(obj, dict):
            # shallow-copy first: obj may be an instance of a dict
            # subclass, so we don't start from a plain empty dict
            copied = obj.copy()
            for key, value in obj.items():
                copied[key] = try_deep_copy(value, False)
            return copied
        if _recursive and isinstance(obj, list):
            copied = obj[:]
            for index, value in enumerate(obj):
                copied[index] = try_deep_copy(value, False)
            return copied
        try:
            return deepcopy(obj)
        except TypeError:
            return obj

    nested = Diot(a=Diot(b=Diot(c=1)))
    assert try_deep_copy(nested) == nested
def test_box_inits():
    """Equivalent constructions agree; kwargs override mapping values."""
    from_dict = Diot({'data': 2, 'count': 5})
    from_kwargs = Diot(data=2, count=5)
    overridden = Diot({'data': 2, 'count': 1}, count=5)
    from_pairs = Diot([('data', 2), ('count', 5)])
    nested = Diot({'a': [{'item': 3}, {'item': []}]}, diot_nest=[dict, list])
    assert nested.a[1].item == []
    assert from_dict == from_kwargs == overridden == from_pairs
def proc_init(proc):
    """Add config"""
    # the report template source and the envs used to render it
    proc.add_config('report_template',
                    default='',
                    converter=report_template_converter)
    proc.add_config('report_envs',
                    default=Diot(),
                    converter=lambda env_dict: env_dict or Diot())
def test_create_subdicts():
    """Dicts/lists assigned after construction are nested into Diot too."""
    box = Diot({'data': 2, 'count': 5}, diot_nest=[dict, list])
    box.brand_new = {'subdata': 1}
    assert box.brand_new.subdata == 1
    box.new_list = [{'sub_list_item': 1}]
    assert box.new_list[0].sub_list_item == 1
    assert isinstance(box.new_list, list)
    box.new_list2 = [[{'sub_list_item': 2}]]
    assert box.new_list2[0][0].sub_list_item == 2
def test_to_toml_file(tmp_path):
    """to_toml(path) writes a file that rtoml loads back unchanged."""
    import rtoml
    box = Diot(**test_dict)
    toml_file = tmp_path / 'diot_test_to_toml.toml'
    box.to_toml(toml_file)
    with open(toml_file) as fh:
        loaded = rtoml.load(fh)
    expected = {key: val
                for key, val in test_dict.items()
                if key != 'diot_nest'}
    assert loaded == expected
def test_to_yaml_file(tmp_path):
    """to_yaml(path) writes a file that yaml loads back unchanged."""
    import yaml
    box = Diot(**test_dict)
    yaml_file = tmp_path / 'diot_test_to_yaml.yaml'
    box.to_yaml(yaml_file)
    with open(yaml_file) as fh:
        loaded = yaml.load(fh, Loader=yaml.SafeLoader)
    expected = {key: val
                for key, val in test_dict.items()
                if key != 'diot_nest'}
    assert loaded == expected
def test_conversion_box():
    """Keys that are not valid identifiers get accessible transformations."""
    box = Diot(extended_test_dict, diot_nest=[dict, list])
    assert list(box.accessible_keys()) == [
        '_3', '_not', '_box_config', 'CamelCase', '_321CamelCase', '_False',
        'tuples_galore', 'key1', 'diot_nest', 'not_allowed', 'BigCamel',
        'alist', 'Key_2'
    ]
    assert box.Key_2.Key_3 == "Value 3"
    assert box._3 == 'howdy'
    assert box._not == 'true'
    # a key that cannot be transformed is not reachable as an attribute
    with pytest.raises(AttributeError):
        getattr(box, "(3, 4)")
def test_is_in():
    """Membership works for plain keys and transformed special-char keys."""
    box = Diot()
    other = Diot()
    assert "a" not in box
    assert "a" not in other
    box["b"] = 1
    other["b"] = {}
    assert "b" in box
    assert "b" in other
    # a key with special characters is visible under both spellings
    box["a_@_b"] = 1
    assert "a__b" in box
    assert "a_@_b" in box
    delattr(box, "a_@_b")
def test_submit(job0, caplog):
    """submit() short-circuits running jobs and logs submission failures."""
    # already running: submit reports success without resubmitting
    job0.isRunningImpl = lambda: True
    assert job0.submit()
    assert 'is already running at' in caplog.text

    # not running and the backend returns rc 0: success
    job0.isRunningImpl = lambda: False
    job0.submitImpl = lambda: Diot(rc=0)
    assert job0.submit()

    # backend returns a nonzero rc: failure is logged
    job0.submitImpl = lambda: Diot(rc=1, cmd='', stderr='')
    caplog.clear()
    assert not job0.submit()
    assert 'Submission failed' in caplog.text
def __init__(self, *procs, **kwargs):
    """@API
    Constructor
    @params:
        *procs (Proc) : the set of processes
        **kwargs: Other arguments to instantiate a `ProcSet`
            depends (bool): Whether auto deduce depends. Default: `True`
            id (str): The id of the procset. Default: `None` (the variable name)
            tag (str): The tag of the processes. Default: `None`
            copy (bool): Whether copy the processes or just use them. Default: `True`
    """
    # write through __dict__ to bypass attribute delegation to member procs
    self.__dict__['id'] = kwargs.get('id') or varname(context = 101)
    self.__dict__['tag'] = kwargs.get('tag')
    self.__dict__['starts'] = Proxy()
    self.__dict__['ends'] = Proxy()
    self.__dict__['delegates'] = OrderedDiot()
    self.__dict__['procs'] = OrderedDiot()
    self.__dict__['modules'] = Diot(diot_nest = False)
    # save initial states before a module is called
    # states will be resumed before each module is called
    self.__dict__['initials'] = Diot(diot_nest = False)

    ifcopy = kwargs.get('copy', True)
    depends = kwargs.get('depends', True)

    prevproc = None
    for proc in procs:
        assert hasattr(proc, 'id') and hasattr(proc, 'tag'), \
            'Argument has to be a Proc object: %r.' % proc

        if ifcopy:
            # copy keeps the original untouched; the new tag is
            # '<base tag>@<procset id>'
            self.procs[proc.id] = proc.copy(
                proc.id,
                tag = (self.tag or proc.tag.split('@', 1)[0]) + '@' + self.id)
        else:
            # reuse the process object but retag it for this procset
            self.procs[proc.id] = proc
            proc.config.tag = (
                self.tag or proc.tag.split('@', 1)[0]) + '@' + self.id

        if depends and prevproc is None:
            # the first process opens the chain
            self.starts.add(self[proc.id])
        if depends and prevproc:
            # chain each subsequent process to its predecessor
            self.procs[proc.id].depends = prevproc
        prevproc = self.procs[proc.id]

    if depends and prevproc:
        # the last process closes the chain
        self.ends.add(prevproc)

    # route these settings to the start/end processes by default
    self.delegate('input', 'starts')
    self.delegate('depends', 'starts')
    self.delegate('ex*', 'ends')
def on_logger_request(self, data):
    """When requesting the output of a running command"""
    logger.debug('Got request logger_request from client '
                 f'{request.remote_addr}')
    # proc: that.proc,
    # job: that.job_index(),
    # reqlog: reqlog,
    # eleid: this.id
    # resp
    # // data.log: the message got to show
    # // data.isrunning: whether the process is still running
    data = Diot(data)
    running = pipeline_data.setdefault('running', Diot())
    resp = data
    resp.isrunning = False
    resp.log = ''
    if data.eleid in running:
        rdata = running[data.eleid]
        resp.isrunning = True
        resp.pid = rdata['cmdy'].pid
        # drain output accumulated since the last request; `done` tells
        # whether the process has finished
        buffer, done = read_cmdout(rdata['cmdy'])
        rdata['buffer'] = rdata.get('buffer', '') + buffer
        if data.reqlog == 'all':
            # the whole log collected so far
            resp.log = rdata['buffer']
        elif data.reqlog == 'more':
            # only the newly drained part
            resp.log = buffer
        elif data.reqlog == 'kill':
            cmdy.kill({'9': resp.pid}, _raise=False)
            # killing succeeded?
            resp.isrunning = 'killed'
            resp.log = ''
            done = False
            try:
                # this could be deleted by previous request
                del running[data.eleid]
            except KeyError:
                pass
        if done:
            # process finished: report it and drop the bookkeeping entry
            resp.isrunning = 'done'
            try:
                del running[data.eleid]
            except KeyError:
                pass
    logger.debug('Sending response logger_response to client '
                 f'{request.remote_addr}')
    emit('logger_response', resp)
def test_fix_popen_config():
    """envs merges into env, values stringify, os.environ is overlaid."""
    # both envs and env present: env wins and a warning is raised
    config = Diot({'envs': {'x': 1}, 'env': {'x': 2}})
    with pytest.warns(UserWarning):
        _cmdy_fix_popen_config(config)
    assert 'envs' not in config
    assert config['env']['x'] == '2'

    # envs alone is renamed to env; bools are normalized to '1'/'0'
    config = Diot({'envs': {'x': True}})
    _cmdy_fix_popen_config(config)
    assert 'envs' not in config
    assert config['env']['x'] == '1'
    # other envs loaded
    assert len(config['env']) > 1
def test_from_namespace():
    """from_namespace converts argparse namespaces, optionally recursively."""
    ns = Namespace(a=1, b=2)
    converted = Diot.from_namespace(ns)
    assert len(converted) == 2
    assert converted.a == 1
    assert converted.b == 2

    # recursive
    ns.c = Namespace()
    deep = Diot.from_namespace(ns, recursive=True)
    assert len(deep) == 3
    assert isinstance(deep.c, Diot)
    shallow = Diot.from_namespace(ns, recursive=False)
    assert isinstance(shallow.c, Namespace)
def fixt_funcsig(request):
    """Pair a callable (or non-callable) with its expected source signature.

    NOTE: func1/func2 definitions must stay on a single line exactly as
    written — the expected values are the literal source lines.
    """
    if request.param == 0:
        def func1(): pass
        return Diot(func=func1, expt="def func1(): pass")
    if request.param == 1:
        func2 = lambda: True
        return Diot(func=func2, expt='func2 = lambda: True')
    if request.param == 2:
        # an empty string is not a function
        return Diot(func="", expt="None")
    if request.param == 3:
        # an arbitrary string is not a function either
        return Diot(func="A", expt="None")
def test_report(job0, caplog):
    """report() logs inputs/outputs, shortening long values and paths."""
    job0.proc._log.shorten = 10
    job0.input = Diot(
        a=('var', 'abcdefghijkmnopq'),
        bc=('files', ['/long/path/to/file1']),
        de=('file', '/long/path/to/file2'),
    )
    job0.output = Diot(
        outfile=('file', '/path/to/output/file1'),
        outfiles=('files', ['/path/to/output/file2']),
    )
    job0.report()
    assert 'pProc: [1/1] a => ab ... pq' in caplog.text
    assert 'pProc: [1/1] bc => [ /l/p/t/file1 ]' in caplog.text
    assert 'pProc: [1/1] de => /l/p/t/file2' in caplog.text
    assert 'pProc: [1/1] outfile => /p/t/o/file1' in caplog.text
    assert 'pProc: [1/1] outfiles => [ /p/t/o/file2 ]' in caplog.text
def fixt_filesig(request, tmp_path):
    """File-signature test cases: a path plus its expected signature."""
    if not request.param:
        return Diot(file='', expt=['', 0])
    if request.param == 'a_file':
        afile = tmp_path / 'filesig_afile'
        afile.write_text('')
        return Diot(file=afile,
                    expt=[str(afile), int(path.getmtime(afile))])
    if request.param == 'nonexists':
        return Diot(file='/path/to/__non_exists__', expt=False)
    if request.param == 'a_link':
        # a symlink's signature carries its target's mtime
        alink = tmp_path / 'filesig_alink'
        alink_orig = tmp_path / 'filesig_alink_orig'
        alink_orig.write_text('')
        alink.symlink_to(alink_orig)
        return Diot(file=alink,
                    expt=[str(alink), int(path.getmtime(alink_orig))])
    if request.param == 'a_link_to_dir':
        alink = tmp_path / 'filesig_alink_to_dir'
        adir = tmp_path / 'filesig_adir'
        adir.mkdir()
        alink.symlink_to(adir)
        return Diot(file=alink,
                    expt=[str(alink), int(path.getmtime(adir))])
    if request.param == 'a_dir_with_subdir':
        # the dir is newer than its subdir: the dir's own mtime wins
        adir = tmp_path / 'filesig_another_dir'
        adir.mkdir()
        utime(adir, (path.getmtime(adir) + 100, ) * 2)
        asubdir = adir / 'filesig_another_subdir'
        asubdir.mkdir()
        return Diot(file=adir,
                    expt=[str(adir), int(path.getmtime(adir))])
    if request.param == 'a_dir_with_file':
        # a contained file is newer than the dir: the file's mtime wins
        adir = tmp_path / 'filesig_another_dir4'
        adir.mkdir()
        utime(adir, (path.getmtime(adir) - 100, ) * 2)
        afile = adir / 'filesig_another_file4'
        afile.write_text('')
        return Diot(file=adir,
                    expt=[str(adir), int(path.getmtime(afile))])
    if request.param == 'a_dir_subdir_newer':
        # a newer subdir bumps the signature mtime
        adir = tmp_path / 'filesig_another_dir2'
        adir.mkdir()
        utime(adir, (path.getmtime(adir) - 100, ) * 2)
        asubdir = adir / 'filesig_another_subdir2'
        asubdir.mkdir()
        return Diot(file=adir,
                    expt=[str(adir), int(path.getmtime(asubdir))])
    if request.param == 'a_dir_subdir_newer_dirsig_false':
        # dirsig=False: do not recurse, keep the dir's own mtime
        adir = tmp_path / 'filesig_another_dir3'
        adir.mkdir()
        utime(adir, (path.getmtime(adir) - 100, ) * 2)
        asubdir = adir / 'filesig_another_subdir3'
        asubdir.mkdir()
        return Diot(file=adir,
                    dirsig=False,
                    expt=[str(adir), int(path.getmtime(adir))])
def __init__(self,  # pylint: disable=too-many-arguments
             filename,
             prev,
             config,
             stream=None,
             shared_code=None,
             code=None):
    """@API
    Constructor for LiquidParser
    @params:
        filename (str): The filename of the template
        prev (LiquidParse): The previous parser
        config (LiquidConfig): The configuration
        stream (stream): The stream to parse instead of the file of filename
        shared_code (LiquidCode): The shared code
        code (LiquidCode): The code object
    """
    # parse from the given stream, or open the file lazily
    self.stream = stream or LiquidStream.from_file(filename)
    self.shared_code = shared_code
    self.code = code or LiquidCode()
    # previous lineno and parser, to get call stacks
    self.prev = prev
    # stack depth: one deeper than the parent parser, 1 at the root
    nstack = prev[1].context.nstack + 1 if prev else 1
    LOGGER.debug("[PARSER%2s] Initialized from %s", nstack, filename)
    # deferred nodes
    self.deferred = []
    self.config = config
    self.context = Diot(
        filename=filename,
        lineno=1,
        history=[],
        # the data passed on during parsing
        data=Diot(),
        stacks=[],
        # which parser stack I am at
        nstack=nstack,
        # attach myself to the context
        parser=self)
    # previous closing tag
    # we need it to do compact for next literal
    self.endtag = None
    # guard against runaway include/extends recursion
    if nstack >= LIQUID_MAX_STACKS:
        raise LiquidSyntaxError(f'Max stacks ({nstack}) reached',
                                self.context)
def do_case(name, case):
    """Do scatter with one case"""
    # outer-scope defaults; per-case settings override them via `|`
    defaults = Diot(
        convert_args=convert_args,
        threshold=threshold,
        min_probes=min_probes,
        male_reference=male_reference,
        sample_sex=sample_sex or False,
        no_shift_xy=no_shift_xy,
        title=title,
    )
    case = defaults | case
    conv_args = case.pop("convert_args")
    pdffile = Path(outdir).joinpath(f"{name}.heatmap.pdf")
    pngfile = Path(outdir).joinpath(f"{name}.heatmap.png")
    # render the diagram as PDF first ...
    cmdy.cnvkit.diagram(
        **case,
        s=cnsfile,
        o=pdffile,
        _=cnrfile,
        _exe=cnvkit,
    ).fg()
    # ... then convert it to PNG
    cmdy.convert(
        **conv_args,
        _=[pdffile, pngfile],
        _prefix="-",
        _exe=convert,
    ).fg()
def parse(self, force=False):
    # type: (bool) -> None
    """Parse the include template"""
    if not super().parse(force):
        # nothing (new) to parse
        return
    path = self.parsed[0]  # pylint: disable=access-member-before-definition
    path = str(path)
    try:
        # resolve relative to the current template and the configured
        # include directories
        include_template = find_template(path,
                                         self.context.path,
                                         self.parser.config.include_dir)
        if not include_template or not include_template.is_file():
            # normalize "not found" to the same error path as an OS failure
            raise OSError
    except OSError:
        raise LiquidSyntaxError(
            f'Cannot find template: {path!r} ({self!r})',
            self.context,
            self.parser) from None
    meta = template_meta(include_template)
    # parse the included template with a child parser, one level deeper
    inc_parser = self.parser.__class__(
        meta,
        self.parser.config,
        Diot(name=meta.name,
             stream=meta.stream,
             path=meta.path,
             colno=0,
             lineno=0,
             level=self.context.level + 1))
    inc_parser.parse()
    inc_parser.parent = self.parser
    inc_parser.config.update_logger()
    # replace the raw path with the parsed child parser
    # pylint: disable=attribute-defined-outside-init
    self.parsed = inc_parser, self.parsed[1]
def _cmdy_normalize_config(config: Diot):
    """Normalize shell and okcode to list"""
    if 'okcode' in config:
        # accept '0, 1', a single code, or a list; end with a list of ints
        okcode = config.okcode
        if isinstance(okcode, str):
            okcode = [part.strip() for part in okcode.split(',')]
        if not isinstance(okcode, list):
            okcode = [okcode]
        config.okcode = [int(code) for code in okcode]

    if 'shell' in config and config.shell:
        # accept True, a single executable, or an [exe, flag] pair
        shell = config.shell
        if shell is True:
            shell = ['/bin/bash', '-c']
        if not isinstance(shell, list):
            shell = [shell, '-c']
        elif len(shell) == 1:
            shell.append('-c')
        config.shell = shell
def test_to_json(tmp_path):
    """to_json returns a parseable string and writes a loadable file."""
    import json
    box = Diot(**test_dict)
    expected = {key: val
                for key, val in test_dict.items()
                if key != 'diot_nest'}
    assert json.loads(box.to_json(indent=0)) == expected

    json_file = tmp_path / 'diot_test_to_json.json'
    box.to_json(json_file)
    with open(json_file) as fh:
        assert json.load(fh) == expected