def dataProvider_testFromPairs(self):
    """Data provider for testFromPairs.

    Yields ``(pattern, expected_channel)`` tuples: a glob pattern and the
    Channel that pairing the matched files two-by-two should produce
    (presumably consumed by a Channel.fromPairs test -- confirm against
    the consuming test method).
    """
    files1 = [path.join(self.testdir, 'testFromPairs1%s.txt' % i) for i in range(0, 4)]
    files2 = [path.join(self.testdir, 'testFromPairs2%s.txt' % i) for i in range(0, 4)]
    # The files must actually exist so the glob patterns below match them.
    for f in files1 + files2:
        helpers.writeFile(f)
    # Each group of four files pairs up into two (a, b) tuples.
    yield path.join(self.testdir, 'testFromPairs1?.txt'), Channel.create([(files1[0], files1[1]), (files1[2], files1[3])])
    yield path.join(self.testdir, 'testFromPairs2?.txt'), Channel.create([(files2[0], files2[1]), (files2[2], files2[3])])
def dataProvider_testFromFile(self):
    """Data provider for testFromFile.

    Yields ``(filepath, expected_channel, header, skip, delimit[, raises])``
    -- assumed from the argument positions; verify against the consuming
    test. Cases: plain TSV, CSV with header, commented lines + short
    header row, and a malformed file expected to error.
    """
    file1 = path.join(self.testdir, 'testFromFile1.txt')
    helpers.writeFile(file1, "a1\tb1\tc1\n" + "a2\tb2\tc2")
    outs = Channel.create([("a1", "b1", "c1"), ("a2", "b2", "c2")])
    yield file1, outs, False, 0, '\t'
    # head & delimit
    file2 = path.join(self.testdir, "testFromFile2.txt")
    helpers.writeFile(file2, "a,b,c\n" + "a1,b1,c1\n" + "a2,b2,c2")
    outs = Channel.create([("a1", "b1", "c1"), ("a2", "b2", "c2")])
    yield file2, outs, ['a', 'b', 'c'], 0, ','
    # skip
    file3 = path.join(self.testdir, "testFromFile3.txt")
    # Two comment lines are skipped; the header row has one fewer column
    # than the data rows, hence the 'RowNames' placeholder column.
    helpers.writeFile(file3, "#a,b,c\n" + "#a,b,c\n" + "b,c\n" + "a1,b1,c1\n" + "a2,b2,c2")
    outs = Channel.create([("a1", "b1", "c1"), ("a2", "b2", "c2")])
    yield file3, outs, ['RowNames', 'b', 'c'], 2, ','
    # error
    file4 = path.join(self.testdir, "testFromFile4.txt")
    # Header has MORE columns than the data rows: expected to raise.
    helpers.writeFile(file4, "#a,b,c\n" + "b,c,d,e\n" + "a1,b1,c1\n" + "a2,b2,c2")
    yield file4, [], ['a'], 1, ',', True
def dataProvider_testExpand(self):
    """Data provider for testExpand.

    Yields ``(channel, col, expected[, pattern])`` -- a channel whose
    directory column ``col`` should be expanded into the files it
    contains (positional meaning assumed; confirm against the test).
    """
    # empty self
    yield Channel.create(), 0, []
    # defaults
    dir1 = path.join(self.testdir, 'testExpand')
    file1 = path.join(dir1, 'testExpand1.txt')
    file2 = path.join(dir1, 'testExpand2.txt')
    makedirs(dir1)
    helpers.writeFile(file1)
    helpers.writeFile(file2)
    yield Channel.create(dir1), 0, [file1, file2]
    # extra columns
    dir2 = path.join(self.testdir, 'testExpand2')
    file3 = path.join(dir2, 'testExpand3.txt')
    file4 = path.join(dir2, 'testExpand4.txt')
    makedirs(dir2)
    helpers.writeFile(file3)
    helpers.writeFile(file4)
    # Non-directory columns ('a', 1) are carried over to every expanded row.
    yield Channel.create(('a', 1, dir2)), 2, [('a', 1, file3), ('a', 1, file4)]
    # pattern not exists
    yield Channel.create(('a', 1, dir2)), 2, [], 'a.*'
    # expand respectively
    yield Channel.create([('a', 1, dir1), ('b', 2, dir2)]), 2, Channel.create([
        ('a', 1, file1),
        ('a', 1, file2),
        ('b', 2, file3),
        ('b', 2, file4),
    ])
def dataProvider_testCollapse(self):
    """Data provider for testCollapse.

    Yields ``(channel, col, expected[, raises])`` -- collapsing a file
    column back to its common directory (positional meaning assumed;
    confirm against the consuming test).
    """
    # empty self
    yield Channel.create(), 0, [], True
    # defaults
    dir1 = path.join(self.testdir, 'testCollapse')
    file1 = path.join(dir1, 'testCollapse1.txt')
    file2 = path.join(dir1, 'testCollapse2.txt')
    makedirs(dir1)
    helpers.writeFile(file1)
    helpers.writeFile(file2)
    yield Channel.create([file1, file2]), 0, dir1
    # Extra cols
    yield Channel.create([('a1', file1, 'a2'), ('b1', file2, 'b2')]), 1, ('a1', dir1, 'a2')
    # No common prefix
    yield Channel.create([('a1', file1, 'a2'), ('b1', file2, 'b2')]), 0, ('', file1, 'a2')
def dataProvider_testBuildScript(self):
    """Data provider for testBuildScript.

    Yields ``(proc, script, expected[, exception, errmsg_or_logs])``:
    empty script (warns), template files exercising the
    'PYPPL INDENT REMOVE/KEEP' directives, and a missing template file
    that should raise ProcScriptError.
    """
    pBuildScript = Proc()
    pBuildScript.ppldir = self.testdir
    # No script at all: default bash shebang plus a warning log.
    yield pBuildScript, '', '#!/usr/bin/env bash\n', None, None, [
        'WARNING', 'No script specified'
    ]
    tplfile = path.join(self.testdir, 'scriptTpl.txt')
    helpers.writeFile(tplfile, [
        'A', 'B', 'Repeat1', '', 'C',
        ' ### PYPPL INDENT REMOVE',
        ' D',
        ' E',
        ' # PYPPL INDENT KEEP ###',
        ' F'
    ])
    # 'D' loses its indent (REMOVE active), 'E' keeps it after KEEP.
    yield pBuildScript, 'file:' + tplfile, [
        '#!/usr/bin/env bash', 'A', 'B', 'Repeat1', '', 'C', 'D', ' E', ' F', ''
    ]
    tplfile1 = path.join(self.testdir, 'scriptTpl1.txt')
    helpers.writeFile(tplfile1, [
        'A', 'B', 'Repeat1', 'Repeat2', '', 'Repeat3', 'C',
        ' ### PYPPL INDENT REMOVE',
        ' D',
        ' # PYPPL INDENT REMOVE',
        ' E',
        ' # PYPPL INDENT KEEP ###',
        ' F',
    ])
    yield pBuildScript, 'file:' + tplfile1, [
        '#!/usr/bin/env bash', 'A', 'B', 'Repeat1', 'Repeat2', '', 'Repeat3', 'C', 'D', 'E', ' F', ''
    ]
    tplfile = path.join(self.testdir, 'nosuchtpl')
    # Missing template file must raise with this message prefix.
    yield pBuildScript, 'file:' + tplfile, '', ProcScriptError, 'No such template file:'
def dataProvider_testBuildJobs(self):
    """Data provider for testBuildJobs.

    Yields ``(proc, expected_size, expected_outfiles, expected_outkeys, logs)``.
    The first case pre-builds a 2-job proc through the private _build*
    pipeline (order matters: props -> input -> procvars -> output -> script);
    the second case has no input and should be skipped with a warning.
    """
    pBuildJobs = Proc()
    pBuildJobs.ppldir = self.testdir
    infile1 = path.join(self.testdir, 'pBuildJobs-in1.txt')
    infile2 = path.join(self.testdir, 'pBuildJobs-in2.txt')
    helpers.writeFile(infile1)
    helpers.writeFile(infile2)
    pBuildJobs.input = {'a': 1, 'b:file': [infile1, infile2], 'c:files': [[infile1, infile2]]}
    pBuildJobs.output = 'out:file:{{in.b | fn}}-{{in.a}}.out'
    pBuildJobs.script = 'echo {{in.a}} > {{out.out}}'
    # Silence the build-time logging while preparing the proc.
    with helpers.log2str(levels = 'all') as (out, err):
        pBuildJobs._buildProps ()
        pBuildJobs._buildInput ()
        pBuildJobs._buildProcVars ()
        #pBuildJobs._buildBrings ()
        pBuildJobs._buildOutput()
        pBuildJobs._buildScript()
    yield pBuildJobs, 2, [
        path.join(pBuildJobs.workdir, '1', 'output', 'pBuildJobs-in1-1.out'),
        path.join(pBuildJobs.workdir, '2', 'output', 'pBuildJobs-in2-1.out')
    ], ['out'], [
        'INPUT',
        '/2] a => 1',
        '/2] b => %s' % pBuildJobs.workdir,
        # '/2] _b => %s' % testdir,
        '/2] c => [ %s' % pBuildJobs.workdir,
        '/2] %s' % pBuildJobs.workdir,
        # '/2] _c => [%s' % testdir,
        # '/2] %s' % testdir,
        'OUTPUT',
        '/2] out => %s' % pBuildJobs.workdir
    ]
    pBuildJobs1 = Proc()
    pBuildJobs1.ppldir = self.testdir
    # No input data: zero jobs, proc is skipped with a warning.
    yield pBuildJobs1, 0, [], [], [
        'WARNING', 'No data found for jobs, process will be skipped.'
    ]
def testNoYaml(self, testdir):
    """PyPPL should ignore a YAML cfgfile when the yaml module is unavailable.

    Simulates 'yaml not installed' by evicting it from sys.modules and
    emptying sys.path so it cannot be re-imported, then verifies the
    resulting config is empty and a default log file was still created
    (and cleans that log file up).
    """
    PyPPL.DEFAULT_CFGFILES = []
    ymlfile = path.join(testdir, 'config.yaml')
    helpers.writeFile(ymlfile, ['default:', ' forks: 10'])
    import sys
    # Evict yaml and block re-import by clearing sys.path (restored below).
    if helpers.moduleInstalled('yaml'):
        import yaml
        del sys.modules['yaml']
    paths = []
    paths.extend(sys.path)
    del sys.path[:]
    #while sys.path:
    #    paths.append(sys.path.pop(0))
    with helpers.log2str(levels='all') as (out, err):
        pp = PyPPL(config={'_log': {'file': True}}, cfgfile=ymlfile)
    #for p in paths: sys.path.append(p)
    sys.path = paths  # restore the import path
    # The YAML cfgfile could not be loaded, so config stays empty.
    self.assertDictEqual(pp.config, {})
    # '_log.file: True' defaults the log file name from sys.argv[0].
    logfiles = glob(path.splitext(sys.argv[0])[0] + "*.pyppl.log")
    self.assertTrue(logfiles)
    for logfile in logfiles:
        remove(logfile)
def dataProvider_testLoadFile(self):
    """Data provider for testLoadFile.

    Yields ``(filepath, show, expected_parameters)`` (second field assumed
    to be the 'show' flag -- confirm against the test) for JSON, YAML
    (only when pyyaml is installed) and INI-style config files.
    """
    # A directory instead of a file: no parameters loaded.
    yield self.testdir, False, []
    jsonfile = path.join(self.testdir, 'testLoadFile.json')
    helpers.writeFile(
        jsonfile, '\n'.join([
            '{',
            ' "a": "2",',
            ' "a.desc": "Option a",',
            ' "a.type": "int",',
            ' "a.required": true',
            '}',
        ]))
    # 'a.type: int' coerces the string "2" to int 2.
    p1 = Parameter('a', 2)
    p1.desc = "Option a"
    p1.required = True
    yield jsonfile, True, [p1]
    if helpers.moduleInstalled('yaml'):
        yamlfile = path.join(self.testdir, 'testLoadFile.yaml')
        helpers.writeFile(
            yamlfile, '\n'.join([
                'a: 2',
                'a.desc: Option a',
                'a.type: int',
                'a.required: false',
                'a.show: true',
                '',
            ]))
        p2 = Parameter('a', 2)
        p2.desc = "Option a"
        p2.required = False
        p2.show = True
        yield yamlfile, False, [p2]
    conffile = path.join(self.testdir, 'testLoadFile.conf')
    helpers.writeFile(
        conffile, '\n'.join([
            '[PARAM1]',
            'a = 2',
            'a.desc = Option a',
            'a.type = int',
            'a.required = f',
            '[PARAM2]',
            'a.type = str',
            'b:',
            ' 1',
            ' 2',
            'b.type = list',
        ]))
    # [PARAM2] overrides a.type back to str, hence the string value '2'.
    p3 = Parameter('a', '2')
    p3.desc = "Option a"
    p3.required = False
    # Multi-line 'b:' value with 'b.type = list' becomes a list of strings.
    p4 = Parameter('b', ['1', '2'])
    yield conffile, True, [p3, p4]
def dataProvider_testTidyAfterRun(self):
    """Data provider for testTidyAfterRun.

    Yields ``(proc, errhow, resume, exception, expected_logs)`` (positions
    assumed; confirm against the test): callback invocation on skip,
    all-jobs-successful, a failed job that terminates (SystemExit), and
    the same failure with errhow='ignore'.
    """
    pTidyAfterRun = Proc()
    pTidyAfterRun.props['callback'] = lambda p: p.log('goodbye')
    yield pTidyAfterRun, 'terminate', 'skip+', None, [
        'DEBUG', 'Calling callback ...', 'INFO', 'goodbye'
    ]
    pTidyAfterRun1 = Proc()
    pTidyAfterRun1.ppldir = self.testdir
    pTidyAfterRun1.input = {'in': [1,2]}
    pTidyAfterRun1.props['callback'] = lambda p: p.log('goodbye')
    pTidyAfterRun1._tidyBeforeRun()
    # write rc to job.rc
    for job in pTidyAfterRun1.jobs:
        helpers.writeFile(job.rcfile, 0)
    yield pTidyAfterRun1, 'terminate', '', None, [
        'DEBUG', 'Successful jobs: ALL', 'INFO', 'goodbye'
    ]
    pTidyAfterRun2 = Proc()
    pTidyAfterRun2.ppldir = self.testdir
    pTidyAfterRun2.input = {'in': [1,2]}
    pTidyAfterRun2._tidyBeforeRun()
    # write rc to job.rc -- job 2 fails with rc 1
    helpers.writeFile(pTidyAfterRun2.jobs[0].rcfile, 0)
    helpers.writeFile(pTidyAfterRun2.jobs[1].rcfile, 1)
    yield pTidyAfterRun2, 'terminate', '', SystemExit, [
        'ERROR',
        'failed (totally 1). Return code: 1 (Script error).',
        '[2/2] Script:',
        '[2/2] Stdout:',
        '[2/2] Stderr:',
        '[2/2] check STDERR below:',
        '<EMPTY STDERR>',
    ]
    # Same failing proc, but errors are ignored instead of terminating.
    yield pTidyAfterRun2, 'ignore', '', None, [
        'WARNING',
        '[2/2] failed but ignored (totally 1). Return code: 1 (Script error).',
    ]
def dataProvider_testSaveSettings(self):
    """Data provider for testSaveSettings.

    Yields ``(proc, expected_settings_lines)``: substrings expected in the
    proc.settings file -- a bare default proc (Liquid template) and a
    fully-configured proc rendered with Jinja2 (only when jinja2 is
    installed).
    """
    pSaveSettings = Proc()
    pSaveSettings.ppldir = self.testdir
    yield pSaveSettings, [
        # '[brings]',
        '[channel]', 'value: []',
        '[depends]', 'procs: []',
        '[echo]', 'jobs: []', 'type: {"stderr": null, "stdout": null}',
        '[expart]', 'value_0: TemplateLiquid < >',
        '[expect]', 'value: TemplateLiquid < >',
        '[input]',
        '[output]',
        '[procvars]', 'args: {}', 'proc: {',
        '[rc]', 'value: [0]',
        '[runner]', 'value: local',
        '[script]', 'value:', ' "TemplateLiquid < #!/usr/bin/env bash >"',
        '[sets]', 'value: [\'ppldir\']',
        '[size]', 'value: 0',
        '[suffix]', 'value: ',
        '[template]', 'name: TemplateLiquid',
        '[workdir]', 'value: ',
    ]
    pSaveSettings1 = Proc()
    pSaveSettings1.ppldir = self.testdir
    infile1 = path.join(self.testdir, 'pSaveSettings1-in1.txt')
    infile2 = path.join(self.testdir, 'pSaveSettings1-in2.txt')
    brfile1 = path.join(self.testdir, 'pSaveSettings1-in1.br')
    brfile2 = path.join(self.testdir, 'pSaveSettings1-in2.br')
    helpers.writeFile(infile1)
    helpers.writeFile(infile2)
    helpers.writeFile(brfile1)
    helpers.writeFile(brfile2)
    pSaveSettings1.input = {
        'a': 1,
        'b:file': [infile1, infile2],
        'c:files': [[infile1, infile2]]
    }
    #pSaveSettings1.brings = {'b': '{{fn(i.b)}}.br'}
    pSaveSettings1.output = 'out:file:{{fn(i.b)}}-{{i.a}}.out'
    pSaveSettings1.echo = {'jobs': [0, 1]}
    pSaveSettings1.expart = '*-1.out'
    pSaveSettings1.expect = 'grep 1 {{o.out}}'
    pSaveSettings1.args.a = 'a'
    pSaveSettings1.rc = '0,1'
    pSaveSettings1.script = 'echo {{i.a}} > {{o.out}}'
    pSaveSettings1.template = 'jinja2'
    if helpers.moduleInstalled('jinja2'):
        yield pSaveSettings1, [
            #'[brings]',
            #'b: [\'TemplateJinja2 < {{fn(i.b)}}.br >\']',
            '[channel]', 'value: []',
            '[depends]', 'procs: []',
            '[echo]', 'jobs: [0, 1]', 'type: {"stderr": null, "stdout": null}',
            '[expart]', 'value_0: TemplateJinja2 < *-1.out >',
            '[expect]', 'value: TemplateJinja2 < grep 1 {{o.out}} >',
            '[input]',
            'a.type: var', 'a.data#0', ' 1', 'a.data#1', ' 1',
            'b.type: file',
            'b.data#0', 'pSaveSettings1-in1.txt',
            'b.data#1', 'pSaveSettings1-in2.txt',
            '[output]', 'out.type: file',
            'out.data: TemplateJinja2 < {{fn(i.b)}}-{{i.a}}.out >',
            '[procvars]', 'args: {"a": "a"}', 'proc: {',
            '[rc]', 'value: [0, 1]',
            '[runner]', 'value: local',
            '[script]', 'value:',
            ' "TemplateJinja2 <<<"',
            ' "\\t#!/usr/bin/env bash"',
            ' "\\techo {{i.a}} > {{o.out}}"',
            ' ">>>"',
            '[sets]',
            'value: [\'ppldir\', \'input\', \'output\', \'echo\', \'expart\', \'expect\', \'rc\', \'script\', \'template\']',
            '[size]', 'value: 2',
            '[suffix]', 'value: ',
            '[template]', 'name: TemplateJinja2',
            '[workdir]', 'value: ',
        ]
def dataProvider_testInit(self):
    """Data provider for testInit.

    Yields ``(config, cfgfile, expected_config, expected_flowchart,
    default_cfgfiles[, logs])`` (positions assumed; confirm against the
    test), exercising cfgfile precedence: later DEFAULT_CFGFILES override
    earlier ones; an explicit cfgfile overrides defaults; an in-code
    config dict overrides everything.
    """
    yield {'_log': {'file': False}}, None, {}, {'theme': 'default'}, [], ['PYPPL', 'TIPS']
    yield {'default': {'forks': 8}, '_log': {'file': False}}, None, {'default': {'forks': 8}}, {'theme': 'default'}, [], ['PYPPL', 'TIPS']
    # default conf files
    # NOTE(review): block nesting reconstructed from mangled source -- the
    # yaml-dependent yields are grouped under the second moduleInstalled
    # check since they reference ymlfile; verify against version control.
    if helpers.moduleInstalled('yaml'):
        ymlfile = path.join(self.testdir, 'config.yaml')
        helpers.writeFile(ymlfile, ['default:', ' forks: 10'])
    j1file = path.join(self.testdir, 'config1.json')
    helpers.writeFile(j1file, '{"default": {"forks": 8}}')
    j2file = path.join(self.testdir, 'config2.json')
    helpers.writeFile(j2file, '{"default": {"forks": 6}}')
    logfile = path.join(self.testdir, 'init.log')  # NOTE(review): appears unused below
    yield {'_flowchart': {'theme': 'dark'}, '_log': {'file': False}}, None, {'default': {'forks': 8}}, {'theme': 'dark'}, [j1file]
    yield {'_log': {'file': False}}, None, {'default': {'forks': 6}}, {'theme': 'default'}, [j1file, j2file]
    yield {'_log': {'file': False}}, None, {'default': {'forks': 8}}, {'theme': 'default'}, [j2file, j1file]
    yield {'_log': {'file': False}}, j1file, {'default': {'forks': 8}}, {'theme': 'default'}, [j2file]
    yield {'default': {'forks': 4}, '_log': {'file': False}}, j1file, {'default': {'forks': 4}}, {'theme': 'default'}, [j2file]
    if helpers.moduleInstalled('yaml'):
        yield {'_log': {'file': False}}, ymlfile, {'default': {'forks': 10}}, {'theme': 'default'}, [j2file, j1file]
        yield {'default': {'forks': 4}, '_log': {'file': False}}, ymlfile, {'default': {'forks': 4}}, {'theme': 'default'}, [j2file, j1file]
        yield {'default': {'forks': 4}, '_log': {'file': False}}, j1file, {'default': {'forks': 4}}, {'theme': 'default'}, [j2file, ymlfile]
# NOTE(review): the header of the enclosing decrypt(...) function is not
# visible in this chunk; the indented loop below is its tail (CBC chaining:
# each decrypted block is XORed against the previous ciphertext block).
    for block in blocks:
        plainText += decryptBlock(block, key, IV)
        IV = block # IV becomes current ciphertext
    return unpad(plainText)

'''
Decrypt block via cbc.
'''
def decryptBlock(block, key, IV):
    # Undo the keyed block permutation (Fk with encrypt=False), then XOR
    # with the previous ciphertext block (or the IV for the first block).
    return XOR(Fk(block, key, False), IV)

def test():
    # Round-trip smoke test with a fixed 32-byte key; prints both the
    # ciphertext and the recovered plaintext for manual inspection.
    key = 'abcdefghijklmnopqrstuvwxyz123456'
    m = bytes('Attack at dawn! Attack at dawn! Attack at dawn! Attack at dawn! Attack at dawn! ', 'utf8')
    cipherText = encrypt(m, key)
    print(cipherText)
    plainText = decrypt(cipherText, key)
    print(plainText)

# usage python cbc.py <e|d> inputFile outputFile keyFile [IVFile]
if __name__ == "__main__":
    # NOTE(review): 'input' shadows the builtin; left unchanged here.
    enc, input, key, iv = readFiles(sys.argv)
    if enc:
        output = encrypt(input, key, iv)
    else:
        output = decrypt(input, key)
    writeFile(output, sys.argv)
def dataProvider_testFromPattern(self):
    """Data provider for testFromPattern.

    Yields ``(pattern, expected_channel[, ftype, sortby, reverse])``
    (positions assumed; confirm against the test). Builds a mix of plain
    files, directories and symlinks with staggered mtimes and distinct
    sizes so that filtering by type and sorting by mtime/size are
    deterministic.
    """
    # create files
    testdir = path.join(self.testdir, 'testFromPattern')
    makedirs(testdir)
    file1 = path.join(testdir, 'testFromPattern1_File.ext1') # 1 file
    file2 = path.join(testdir, 'testFromPattern2_Link.ext1') # 2 link 1
    file3 = path.join(testdir, 'testFromPattern3_File.ext1') # 3 file
    file4 = path.join(testdir, 'testFromPattern4_Link.ext1') # 4 link 3
    file5 = path.join(testdir, 'testFromPattern5_FDir.ext1') # 5 dir
    file6 = path.join(testdir, 'testFromPattern6_FDir.ext2') # 6 dir
    file7 = path.join(testdir, 'testFromPattern7_Link.ext2') # 7 link 5
    file8 = path.join(testdir, 'testFromPattern8_Link.ext2') # 8 link 6
    file9 = path.join(testdir, 'testFromPattern9_File.ext2') # 9 file
    file0 = path.join(testdir, 'testFromPattern0_FDir.ext2') # 0 dir
    # Stagger mtimes one second apart, starting 10s in the past, so the
    # sortby='mtime' expectation below is stable.
    t = time() - 10
    helpers.writeFile(file9, '1')
    utime(file9, (t, t))
    helpers.writeFile(file3, '111')
    utime(file3, (t + 1, t + 1))
    helpers.writeFile(file1, '11')
    utime(file1, (t + 2, t + 2))
    makedirs(file0)
    utime(file0, (t + 3, t + 3))
    makedirs(file5)
    utime(file5, (t + 4, t + 4))
    makedirs(file6)
    utime(file6, (t + 5, t + 5))
    symlink(file5, file7)
    symlink(file6, file8)
    symlink(file3, file4)
    symlink(file1, file2)
    pattern = path.join(testdir, '*')
    yield pattern, Channel.create([
        file0, file1, file2, file3, file4, file5, file6, file7, file8, file9
    ])
    pattern = path.join(testdir, '*.ext2')
    yield pattern, Channel.create([file0, file6, file7, file8, file9])
    # NOTE: 't' is reused below as the file-type filter, no longer a timestamp.
    pattern = path.join(testdir, '*')
    t = 'file'
    yield pattern, Channel.create([file1, file3, file9]), t
    pattern = path.join(testdir, '*')
    t = 'dir'
    yield pattern, Channel.create([file0, file5, file6]), t
    pattern = path.join(testdir, '*')
    t = 'link'
    yield pattern, Channel.create([file2, file4, file7, file8]), t
    pattern = path.join(testdir, 'testFromPattern?_F*.*')
    sortby = 'mtime'
    t = 'any'
    yield pattern, Channel.create(
        [file9, file3, file1, file0, file5, file6]), t, sortby
    pattern = path.join(testdir, 'testFromPattern?_F*.*')
    sortby = 'size'
    t = 'file'
    rev = True
    # Sizes are 3, 2, 1 bytes for file3, file1, file9 respectively.
    yield pattern, Channel.create([file3, file1, file9]), t, sortby, rev
tar = Target(args[2], args[3]) elif (args[1] == "sol"): ## arg[2] - key.txt - User can give name of different file. It should be in data folder ## arg[3] - ciphertex.txt - User can give name of different file. It should be in data folder ## arg[4] - result.txt - User can give name of different file. It should be in data folder sol = Solution(args[2], args[3], args[4]) elif (args[1] == "verify"): Verify(args[2], args[3], args[4]) elif (args[1] == "tests"): writeFile("solution_list.txt", "") try: if not args[2]: args.append(1) except: args.append(1) try: if not args[3]: args.append(27) except: args.append(27) try: if not args[4]:
def __init__(self, d=2, target_txt_path='target.txt'):
    """Build a difficulty target string and persist it.

    The target is 256 characters long: ``d`` leading zeros followed by
    ones. It is printed for inspection and written to *target_txt_path*.

    :param d: difficulty (number of leading '0' characters); coerced to int.
    :param target_txt_path: file the target string is written to.
    """
    zeros = int(d)
    ones = 256 - zeros
    target = '0' * zeros + '1' * ones
    print(target)
    writeFile(target_txt_path, target)
def dataProvider_testCheckCached(self):
    """Data provider for testCheckCached.

    Yields ``(proc, expected_cached, expected_logs)`` covering: caching
    disabled, all jobs truly cached, all jobs export-cached, partially
    cached, and no jobs cached.
    """
    pCheckCached = Proc()
    pCheckCached.props['cache'] = False
    yield pCheckCached, False, [
        'DEBUG', 'Not cached, because proc.cache is False'
    ]
    # 1 all cached
    pCheckCached1 = Proc()
    pCheckCached1.ppldir = self.testdir
    pCheckCached1.input = {'a': [1,2]}
    with helpers.log2str():
        pCheckCached1._tidyBeforeRun()
    for job in pCheckCached1.jobs:
        job.cache()
    yield pCheckCached1, True, [
        'INFO', 'Truly cached jobs : ALL', 'Export-cached jobs: []'
    ]
    # 2 all export cached
    pCheckCached2 = Proc()
    pCheckCached2.ppldir = self.testdir
    pCheckCached2.input = {'a': [1,2]}
    pCheckCached2.output = 'a:file:{{in.a}}.txt'
    pCheckCached2.cache = 'export'
    pCheckCached2.exdir = self.testdir
    with helpers.log2str():
        pCheckCached2._tidyBeforeRun()
    # Fake successful runs: rc files plus output files both in each job's
    # outdir and in the export dir.
    for i, job in enumerate(pCheckCached2.jobs):
        helpers.writeFile(job.rcfile, 0)
        helpers.writeFile(path.join(job.outdir, str(i+1) + '.txt'))
        helpers.writeFile(path.join(self.testdir, str(i+1) + '.txt'))
    yield pCheckCached2, True, [
        'INFO', 'Truly cached jobs : []', 'Export-cached jobs: ALL'
    ]
    # partially cached
    pCheckCached3 = Proc()
    pCheckCached3.ppldir = self.testdir
    pCheckCached3.input = {'a': [1,2]}
    with helpers.log2str():
        pCheckCached3._tidyBeforeRun()
    pCheckCached3.jobs[0].cache()
    yield pCheckCached3, False, [
        'INFO',
        'Truly cached jobs : 0',
        'Export-cached jobs: []',
        'Partly cached, only run non-cached 1 job(s).',
        'DEBUG',
        'Jobs to run: 1'
    ]
    # no jobs cached
    pCheckCached4 = Proc()
    pCheckCached4.ppldir = self.testdir
    pCheckCached4.input = {'a': [1,2]}
    with helpers.log2str():
        pCheckCached4._tidyBeforeRun()
    yield pCheckCached4, False, [
        'DEBUG', 'Not cached, none of the jobs are cached.',
    ]
def clearMockQueue():
    """Truncate the mock scheduler queue files (qsub and sbatch)."""
    mockdir = path.join(__here__, 'mocks')
    for queue_name in ('qsub.queue.txt', 'sbatch.queue.txt'):
        helpers.writeFile(path.join(mockdir, queue_name), '')