def test_build(job0, tmpdir, caplog):
    """Job.build(): directory creation, stdout/stderr backup on rebuild,
    cache hits, and error reporting when building raises.
    """
    job0.proc.input = {}
    job0.proc.output = {}
    job0.proc.cache = True
    job0.proc.script = TemplateLiquid('# script')
    # Start from a clean slate: build() must recreate the job directory.
    fs.remove(job0.dir)
    assert job0.build()
    assert fs.isdir(job0.dir)
    # A fresh build leaves no backup files behind.
    assert not fs.exists(job0.dir / 'job.stdout.bak')
    assert not fs.exists(job0.dir / 'job.stderr.bak')
    (job0.dir / 'job.stdout').write_text('')
    (job0.dir / 'job.stderr').write_text('')
    # Rebuilding over existing stdout/stderr backs them up as *.bak.
    assert job0.build()
    assert fs.exists(job0.dir / 'job.stdout.bak')
    assert fs.exists(job0.dir / 'job.stderr.bak')
    fs.remove(job0.dir / FILE_STDERR)
    fs.remove(job0.dir / FILE_STDOUT)
    job0._signature = None
    job0.rc = 0
    job0.cache()
    # With stdout/stderr missing, build() recreates them as regular files.
    assert job0.build()
    assert fs.isfile(job0.dir / FILE_STDERR)
    assert fs.isfile(job0.dir / FILE_STDOUT)
    job0._signature = None
    job0.rc = 0
    job0.cache()
    # With a valid cache in place, build() reports a cache hit.
    assert job0.build() == 'cached'
    # raise exception while building
    del job0.proc.input
    assert not job0.build()
    # The traceback of the build failure ends up in the job's stderr file.
    assert 'KeyError' in (job0.dir / 'job.stderr').read_text()
def test_reest(job0):
    """Job.reset(): cleanup of previous run artifacts, rotation into
    retry.<n> directories, .jobcache preservation, and re-creation of
    dir/stdout/stderr typed outputs.

    NOTE(review): the name looks like a typo for ``test_reset`` — still
    collected by pytest as-is; confirm before renaming.
    """
    job0.ntry = 0
    (job0.dir / 'output').mkdir()
    (job0.dir / 'output' / 'outfile.txt').write_text('')
    (job0.dir / 'output' / '.jobcache').mkdir()
    (job0.dir / 'job.rc').write_text('')
    (job0.dir / 'job.stdout').write_text('out')
    (job0.dir / 'job.stderr').write_text('err')
    (job0.dir / 'job.pid').write_text('')
    (job0.dir / 'retry.1').mkdir()
    job0.reset()
    # ntry == 0: stale retry dirs and run files are removed outright.
    assert not fs.exists(job0.dir / 'retry.1')
    assert not fs.exists(job0.dir / 'job.rc')
    # recreated (emptied) rather than deleted:
    assert (job0.dir / 'job.stdout').read_text() == ''
    assert (job0.dir / 'job.stderr').read_text() == ''
    assert not fs.exists(job0.dir / 'job.pid')
    assert fs.exists(job0.dir / 'output')
    # recreated: previous output files are gone.
    assert not fs.exists(job0.dir / 'output' / 'outfile.txt')
    job0.ntry = 1
    (job0.dir / 'output' / 'outfile.txt').write_text('')
    (job0.dir / 'output' / '.jobcache' / 'cached.txt').write_text('')
    job0.reset()
    # ntry > 0: outputs are rotated into retry.<ntry>, but .jobcache
    # stays in the live output directory (it is not moved along).
    assert fs.exists(job0.dir / 'retry.1')
    assert not fs.exists(job0.dir / 'retry.1' / '.jobcache')
    assert fs.exists(job0.dir / 'output' / '.jobcache' / 'cached.txt')
    # remove whole output directory
    job0.ntry = 0
    fs.remove(job0.dir / 'output' / '.jobcache')
    (job0.dir / 'output' / 'outfile.txt').write_text('')
    job0.reset()
    assert not fs.exists(job0.dir / 'output' / 'outfile.txt')
    # move whole output directory
    job0.ntry = 1
    fs.remove(job0.dir / 'output' / '.jobcache')
    (job0.dir / 'output' / 'outfile.txt').write_text('')
    job0.reset()
    assert not fs.exists(job0.dir / 'output' / 'outfile.txt')
    # restore output directory and stdout, stderr
    job0.output = OrderedDiot(
        outdir=('dir', job0.dir / 'output' / 'outdir'),
        outfile=('stdout', job0.dir / 'output' / 'outfile'),
        errfile=('stderr', job0.dir / 'output' / 'errfile'),
    )
    job0.ntry = 0
    job0.reset()
    # dir-type outputs are pre-created; stdout/stderr-type outputs become
    # links pointing back at the job's own job.stdout/job.stderr.
    assert fs.isdir(job0.dir / 'output' / 'outdir')
    assert fs.islink(job0.dir / 'output' / 'outfile')
    assert fs.islink(job0.dir / 'output' / 'errfile')
    assert fs.samefile(job0.dir / 'job.stdout',
                       job0.dir / 'output' / 'outfile')
    assert fs.samefile(job0.dir / 'job.stderr',
                       job0.dir / 'output' / 'errfile')
    # what if outdir exists
    job0.reset()
def test_gzip(tmpdir):
    """fs.gzip/fs.gunzip round-trips for both plain files and directories."""
    root = Path(tmpdir)
    gz_file = root / 'test1.gz'
    src_file = root / 'test2'
    restored = root / 'test3'
    src_file.write_text('1')

    # An invalid gzip call with overwrite disabled raises OSError.
    with pytest.raises(OSError):
        fs.gzip(gz_file, src_file, False)

    # File round-trip: compress, then decompress to a new path.
    fs.gzip(src_file, gz_file)
    assert fs.exists(gz_file)
    fs.gunzip(gz_file, restored)
    assert fs.exists(restored)
    assert restored.read_text() == '1'

    # Directory round-trip goes through a .tgz archive.
    src_dir = root / 'dir1'
    dst_dir = root / 'dir2'
    tarball = root / 'dir1.tgz'
    fs.mkdir(src_dir)
    inner = src_dir / 'test'
    inner.write_text('2')
    fs.gzip(src_dir, tarball)
    assert fs.exists(tarball)
    fs.gunzip(tarball, dst_dir)
    assert fs.isdir(dst_dir)
    assert fs.exists(dst_dir / 'test')

    # Unpacking onto an existing directory without overwrite raises.
    with pytest.raises(OSError):
        fs.gunzip(tarball, dst_dir, False)
def job_done(job, status):  # pylint: disable=too-many-branches
    """Export the output if job succeeded"""
    config = job.proc.config
    if status == 'failed' or not config.export_dir:
        return

    # Collect the output files to export.
    parts = config.export_part
    # no partial export
    if not parts or (len(parts) == 1 and not parts[0].render(job.data)):
        targets = [Path(outdata)
                   for outtype, outdata in job.output.values()
                   if outtype not in OUT_VARTYPE]
    else:
        targets = []
        for part in parts:
            rendered = part.render(job.data)
            if rendered in job.output:
                targets.append(Path(job.output[rendered][1]))
            else:
                # Not an output key: treat as a glob under the output dir.
                targets.extend(job.dir.joinpath('output').glob(rendered))

    for file2ex in set(targets):
        # don't export if file2ex does not exist
        # it might be a dead link
        # then job should fail
        if not file2ex.exists():
            return
        # exported file
        exfile = config.export_dir.joinpath(file2ex.name)
        if config.export_how in EX_GZIP:
            extra = '.tgz' if fs.isdir(file2ex) else '.gz'
            exfile = exfile.with_suffix(exfile.suffix + extra)
        # If job is cached and exported file exists, skip exporting
        if status == 'cached' and exfile.exists():
            continue
        with fs.lock(file2ex, exfile):
            if config.export_how in EX_GZIP:
                fs.gzip(file2ex, exfile, overwrite=config.export_ow)
            elif config.export_how in EX_COPY:
                fs.copy(file2ex, exfile, overwrite=config.export_ow)
            elif config.export_how in EX_LINK:
                fs.link(file2ex, exfile, overwrite=config.export_ow)
            else:  # move
                if fs.islink(file2ex):
                    # A symlinked output is copied, not moved.
                    fs.copy(file2ex, exfile, overwrite=config.export_ow)
                else:
                    # Move out, then leave a link so the job dir stays whole.
                    fs.move(file2ex, exfile, overwrite=config.export_ow)
                    fs.link(exfile.resolve(), file2ex)
        job.logger('Exported: %s' % exfile, level='EXPORT', plugin='export')
def test_mkdir(tmpdir):
    """fs.mkdir: refuses an existing directory without overwrite,
    succeeds with the default overwrite behavior."""
    base = Path(tmpdir)
    target = base / 'testdir1'
    target.mkdir()
    # Creating over an existing directory with overwrite=False raises.
    with pytest.raises(OSError):
        fs.mkdir(target, False)
    # The default call succeeds and the directory remains.
    fs.mkdir(target)
    assert fs.isdir(target)
def test_alias(tmpdir):
    """fs aliases (exists/isdir/isfile/islink) against real filesystem
    entries of each kind."""
    assert fs.exists(tmpdir)
    assert fs.isdir(tmpdir)
    regular = tmpdir / 'test1'
    regular.write_text('', encoding='utf-8')
    assert fs.isfile(regular)
    linkpath = tmpdir / 'test2'
    # Clean up a leftover from a previous run before creating the symlink.
    if fs.exists(linkpath):
        linkpath.unlink()
    Path(linkpath).symlink_to(regular)
    assert fs.islink(linkpath)
def job_prebuild(job):
    """See if we can extract output from export directory"""
    # Only applies when this process is cached via its exported files.
    if job.proc.cache != 'export' or not job.proc.config.export_dir:
        return
    # Symlink exports point back into the job dir, so they cannot seed it.
    if job.proc.config.export_how in EX_LINK:
        job.logger("Job is not export-cached using symlink export.",
                   slevel="EXPORT_CACHE_USING_SYMLINK",
                   level="warning",
                   plugin="export")
        return
    # Partial exports do not cover every output, so they cannot seed either.
    if job.proc.config.export_part and \
            job.proc.config.export_part[0].render(job.data):
        job.logger("Job is not export-cached using partial export.",
                   slevel="EXPORT_CACHE_USING_EXPARTIAL",
                   level="warning",
                   plugin="export")
        return
    # Try to restore each non-variable output from the export directory.
    for outtype, outdata in job.output.values():
        if outtype in OUT_VARTYPE:
            continue
        exfile = job.proc.config.export_dir / outdata.name
        if job.proc.config.export_how in EX_GZIP:
            # Gzipped exports carry an extra suffix: .tgz for directory
            # outputs, .gz for file outputs.
            exfile = (exfile.with_suffix(exfile.suffix + '.tgz')
                      if fs.isdir(outdata) or outtype in OUT_DIRTYPE
                      else exfile.with_suffix(exfile.suffix + '.gz'))
            with fs.lock(exfile, outdata):
                if not fs.exists(exfile):
                    # Missing archive: cannot restore, give up entirely.
                    job.logger("Job is not export-cached since exported "
                               "file not exists: %s" % exfile,
                               slevel="EXPORT_CACHE_EXFILE_NOTEXISTS",
                               level="debug",
                               plugin="export")
                    return
                if fs.exists(outdata):
                    job.logger('Overwrite file for export-caching: %s'
                               % outdata,
                               slevel="EXPORT_CACHE_OUTFILE_EXISTS",
                               level="warning",
                               plugin="export")
                fs.gunzip(exfile, outdata)
        else:  # exhow not gzip
            with fs.lock(exfile, outdata):
                if not fs.exists(exfile):
                    job.logger("Job is not export-cached since "
                               "exported file not exists: %s" % exfile,
                               slevel="EXPORT_CACHE_EXFILE_NOTEXISTS",
                               level="debug",
                               plugin="export")
                    return
                # Already pointing at the export: nothing to restore.
                if fs.samefile(exfile, outdata):
                    continue
                if fs.exists(outdata):
                    job.logger("Overwrite file for "
                               "export-caching: %s" % outdata,
                               slevel="EXPORT_CACHE_OUTFILE_EXISTS",
                               level="warning",
                               plugin="export")
                fs.link(exfile.resolve(), outdata)
    # All outputs restored from the export dir: record success and cache.
    job.rc = 0
    job.cache()