def test_make_readonly(tmpdir):
    # Exercising the helpers against an actual file on disk rather than
    # mocking the stat calls -- a touch of torture for one poor temp file,
    # but it validates the whole round trip.
    path = tmpdir.join('f')
    pathname = str(path)
    with open(pathname, 'w'):
        pass

    # Each scenario: (starting mode, expected read-only mode,
    #                 expected mode after restoring writability)
    scenarios = [
        (0o600, 0o400, 0o600),    # write bit fully restored
        (0o624, 0o404, 0o606),    # no write bit granted where there was no read bit
        (0o1777, 0o1555, 0o1777), # non-permission bits (sticky) survive untouched
    ]
    for initial_mode, readonly_mode, restored_mode in scenarios:
        os.chmod(pathname, initial_mode)
        assert not is_readonly(pathname)

        # Flip to read-only and confirm both the reported and actual modes.
        assert set_readonly(pathname) == readonly_mode
        assert is_readonly(pathname)
        assert stat.S_IMODE(os.lstat(pathname).st_mode) == readonly_mode

        # Flipping back should restore writability (subject to the read bits).
        assert set_readonly(pathname, read_only=False) == restored_mode
        assert not is_readonly(pathname)
def custom_callable(*args):
    """
    Called at the end of `heudiconv.convert.convert()` to perform clean-up

    Checks to see if multiple "clean" output files were generated by
    ``heudiconv``. If so, assumes that this was because they had different
    echo times and tries to rename them and embed metadata from the
    relevant dicom files. This only needs to be done because the PPMI
    dicoms are a hot mess (cf. all the lists above with different series
    descriptions).
    """
    import glob
    import re

    import pydicom as dcm
    import nibabel as nib
    import numpy as np

    from heudiconv.cli.run import get_parser
    from heudiconv.dicoms import embed_metadata_from_dicoms
    from heudiconv.utils import (load_json, TempDirs, treat_infofile,
                                 set_readonly)

    # Recover what convert() handed us plus the original CLI options.
    # Re-parsing argv is clunky, but it is the only channel available here.
    prefix, outtypes, item_dicoms = args[:3]
    outtype = outtypes[0]
    opts = get_parser().parse_args()

    # Non-BIDS output? Then renaming is the caller's problem, not ours.
    if not opts.bids:
        return

    # Crude multi-output detection: convert() numbers extra volumes 1-9.
    # A single file means nothing to disambiguate.
    suffixed = glob.glob(prefix + '[1-9].' + outtype)
    if len(suffixed) < 2:
        return

    # Some sequences additionally emit nonsense two-dimensional
    # (single-slice) volumes; drop those and their JSON sidecars outright.
    for fname in suffixed:
        if len([f for f in nib.load(fname).shape if f > 1]) < 3:
            os.remove(fname)
            os.remove(fname.replace(outtype, 'json'))
    suffixed = [fname for fname in suffixed if os.path.exists(fname)]
    pairs = [(f, f.replace(outtype, 'json')) for f in suffixed]

    # A lone survivor needs no 'echo' entity -- just strip the number.
    if len(pairs) == 1:
        safe_movefile(pairs[0][0], prefix + '.' + outtype)
        safe_movefile(pairs[0][1], prefix + scaninfo_suffix)
        return

    # Two or more files usually means a dual-echo sequence; rank the files
    # by their EchoTime pulled from the JSON sidecars.
    echo_times = [load_json(sidecar).get('EchoTime') for (_, sidecar) in pairs]
    if all(t is None for t in echo_times):
        return
    echo_order = np.argsort(echo_times) + 1

    for echo, (nifti, sidecar) in zip(echo_order, pairs):
        # Splice an echo specifier just after the run entity.
        # Not *technically* BIDS compliant yet, but we're making due...
        insert_at = re.search(r'run-(\d+)_', prefix).end()
        new_prefix = (prefix[:insert_at]
                      + 'echo-%d_' % echo
                      + prefix[insert_at:])
        outname = new_prefix + '.' + outtype
        scaninfo = new_prefix + scaninfo_suffix

        # Move without clobbering anything already at the destination.
        safe_movefile(nifti, outname, overwrite=False)
        safe_movefile(sidecar, scaninfo, overwrite=False)

        # Embed metadata only from the dicoms matching this echo
        # (dicom EchoTime is in ms; the sidecar stores seconds).
        dicoms = [
            f for f in item_dicoms
            if isclose(float(dcm.read_file(f, force=True).EchoTime) / 1000,
                       load_json(scaninfo).get('EchoTime'))
        ]
        prov_file = prefix + '_prov.ttl' if opts.with_prov else None
        embed_metadata_from_dicoms(opts.bids, dicoms,
                                   outname, new_prefix + '.json',
                                   prov_file, scaninfo, TempDirs(),
                                   opts.with_prov, opts.minmeta)

        # Finish the steps heudiconv.convert.convert never got to run.
        if scaninfo and os.path.exists(scaninfo):
            lgr.info("Post-treating %s file", scaninfo)
            treat_infofile(scaninfo)
        if outname and os.path.exists(outname):
            set_readonly(outname)