Example 1
def atlas_overlap(dset, atlas=None):
    '''aligns ``dset`` to the TT_N27 atlas and returns ``(cost,overlap)``'''
    atlas = find_atlas(atlas)
    if atlas is None:
        return None

    cost_func = 'crM'
    infile = os.path.abspath(dset)
    tmpdir = tempfile.mkdtemp()
    with nl.run_in(tmpdir):
        o = nl.run([
            '3dAllineate', '-verb', '-base', atlas, '-source', infile + '[0]',
            '-NN', '-final', 'NN', '-cost', cost_func, '-nmatch', '20%',
            '-onepass', '-fineblur', '2', '-cmass', '-prefix', 'test.nii.gz'
        ])
        cost = None
        m = re.search(r'Final\s+cost = ([\d.]+) ;', o.output)
        if m:
            cost = float(m.group(1))
        o = nl.run(['3dmaskave', '-mask', atlas, '-q', 'test.nii.gz'],
                   stderr=None)
        data_thresh = float(o.output) / 4
        i = nl.dset_info('test.nii.gz')
        o = nl.run([
            '3dmaskave', '-q', '-mask', 'SELF', '-sum',
            nl.calc([atlas, 'test.nii.gz'],
                    'equals(step(a-10),step(b-%.2f))' % data_thresh)
        ],
                   stderr=None)
        overlap = 100 * float(
            o.output) / (i.voxel_dims[0] * i.voxel_dims[1] * i.voxel_dims[2])
    shutil.rmtree(tmpdir, ignore_errors=True)
    return (cost, overlap)
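A minimal usage sketch (the dataset path is hypothetical; passing no ``atlas`` assumes ``find_atlas`` falls back to the default TT_N27 atlas):

result = atlas_overlap('anat_subject01.nii.gz')
if result is not None:
    cost, overlap = result
    print('alignment cost = %.3f, atlas overlap = %.1f%%' % (cost, overlap))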
Example 2
def skullstrip_template(dset, template, prefix=None, suffix=None, dilate=0):
    '''Takes the raw anatomy ``dset``, aligns it to a template brain, and applies a templated skullstrip. Should produce fairly reliable
    skullstrips as long as there is a decent amount of normal brain tissue and the overall shape of the brain is roughly normal'''
    if suffix is None:
        suffix = '_sstemplate'
    if prefix is None:
        prefix = nl.suffix(dset, suffix)
    if not os.path.exists(prefix):
        with nl.notify('Running template-based skull-strip on %s' % dset):
            dset = os.path.abspath(dset)
            template = os.path.abspath(template)
            tmp_dir = tempfile.mkdtemp()
            cwd = os.getcwd()
            with nl.run_in(tmp_dir):
                nl.affine_align(template,
                                dset,
                                skull_strip=None,
                                cost='mi',
                                opts=['-nmatch', '100%'])
                nl.run([
                    '3dQwarp', '-minpatch', '20', '-penfac', '10', '-noweight',
                    '-source',
                    nl.suffix(template, '_aff'), '-base', dset, '-prefix',
                    nl.suffix(template, '_qwarp')
                ],
                       products=nl.suffix(template, '_qwarp'))
                info = nl.dset_info(nl.suffix(template, '_qwarp'))
                max_value = info.subbricks[0]['max']
                nl.calc([dset, nl.suffix(template, '_qwarp')],
                        'a*step(b-%f*0.05)' % max_value, prefix)
                shutil.move(prefix, cwd)
            shutil.rmtree(tmp_dir)
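A minimal usage sketch (both file names are hypothetical); with no ``prefix`` given, the stripped anatomy would be written to ``anat_subject01_sstemplate.nii.gz``:

skullstrip_template('anat_subject01.nii.gz', 'TT_N27.nii.gz')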
Example 3
def commit_database(wait=True):
    '''the database is stored as distributed JSON files tracked by git -- this saves a new commit'''
    with nl.run_in(p.padre_root):
        if not os.path.exists('.git'):
            subprocess.check_call(['git', 'init'])
            with open('.gitignore', 'w') as f:
                f.write('\n'.join(_git_ignore))
        proc = subprocess.Popen(['git', 'add'] +
                                glob.glob('Data/*/*.%s' % p.json_ext),
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        proc.wait()
        proc = subprocess.Popen(['git', 'commit', '-am', 'library commit'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        if wait:
            proc.wait()
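A usage sketch; ``wait=False`` leaves the ``git commit`` running in the background:

commit_database()            # blocks until the commit finishes
commit_database(wait=False)  # returns immediately after spawning the commit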
Example 4
def reconstruct_files(input_dir):
    '''sorts ``input_dir`` and tries to reconstruct the subdirectories found'''
    input_dir = input_dir.rstrip('/')
    with nl.notify('Attempting to organize/reconstruct directory'):
        # Some datasets start with a ".", which confuses many programs
        for r, ds, fs in os.walk(input_dir):
            for f in fs:
                if f[0] == '.':
                    shutil.move(os.path.join(r, f), os.path.join(r, 'i' + f))
        nl.dicom.organize_dir(input_dir)
        output_dir = '%s-sorted' % input_dir
        if os.path.exists(output_dir):
            with nl.run_in(output_dir):
                for dset_dir in os.listdir('.'):
                    with nl.notify('creating dataset from %s' % dset_dir):
                        nl.dicom.create_dset(dset_dir)
        else:
            nl.notify('Warning: failed to auto-organize directory %s' %
                      input_dir,
                      level=nl.level.warning)
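A usage sketch (the directory name is hypothetical); the sorted datasets would end up in ``incoming_dicoms-sorted``:

reconstruct_files('incoming_dicoms')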
Example 5
def unpack_archive(fname, out_dir):
    '''unpacks the archive file ``fname`` and reconstructs datasets into ``out_dir``
    
    Datasets are reconstructed and auto-named using :meth:`create_dset`. The raw directories
    that made the datasets are archived with the dataset name suffixed by ``.tgz``, and any other
    files found in the archive are put into ``other_files.tgz``'''
    with nl.notify('Unpacking archive %s' % fname):
        tmp_dir = tempfile.mkdtemp()
        tmp_unpack = os.path.join(tmp_dir, 'unpack')
        os.makedirs(tmp_unpack)
        nl.utils.unarchive(fname, tmp_unpack)
        reconstruct_files(tmp_unpack)
        out_dir = os.path.abspath(out_dir)
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        if not os.path.exists(tmp_unpack + '-sorted'):
            # nothing was sorted; clean up the temp dir before bailing out
            shutil.rmtree(tmp_dir)
            return
        with nl.run_in(tmp_unpack + '-sorted'):
            for fname in glob.glob('*.nii'):
                nl.run(['gzip', fname])
            for fname in glob.glob('*.nii.gz'):
                new_file = os.path.join(out_dir, fname)
                if not os.path.exists(new_file):
                    shutil.move(fname, new_file)
            raw_out = os.path.join(out_dir, 'raw')
            if not os.path.exists(raw_out):
                os.makedirs(raw_out)
            for rawdir in os.listdir('.'):
                rawdir_tgz = os.path.join(raw_out, rawdir + '.tgz')
                if not os.path.exists(rawdir_tgz):
                    with tarfile.open(rawdir_tgz, 'w:gz') as tgz:
                        tgz.add(rawdir)
        if len(os.listdir(tmp_unpack)) != 0:
            # There are still raw files left
            with tarfile.open(os.path.join(raw_out, 'other_files.tgz'),
                              'w:gz') as tgz:
                tgz.add(tmp_unpack)
    shutil.rmtree(tmp_dir)
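A usage sketch (paths are hypothetical); reconstructed ``.nii.gz`` datasets land in ``data/subject01`` and the compressed raw directories in ``data/subject01/raw``:

unpack_archive('incoming/subject01_scans.tgz', 'data/subject01')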
Example 6
def import_archive(full_file,
                   subject_guess,
                   slice_order='alt+z',
                   sort_order='zt'):
    tmp_dir = tempfile.mkdtemp()
    try:
        padre_dir = full_file.replace(import_location, "PADRE/Import")
        tmp_location = os.path.join(tmp_dir, '_tmp_unarchive')
        out_dir = os.path.join(processed_location, subject_guess)
        for d in [tmp_location, tmp_location + '-sorted']:
            if os.path.exists(d):
                shutil.rmtree(d)
        import_log[padre_dir] = {'modified': os.path.getmtime(full_file)}
        if nl.is_archive(full_file):
            with nl.notify('uncompressing files...'):
                os.makedirs(tmp_location)
                nl.unarchive(full_file, tmp_location)
        else:
            with nl.notify('copying files...'):
                shutil.copytree(full_file, tmp_location)
        with nl.notify('sorting files...'):
            nl.dicom.organize_dir(tmp_location)
        dsets_made = {}
        if os.path.exists(tmp_location + '-sorted'):
            if not os.path.exists(os.path.join(out_dir, 'raw')):
                os.makedirs(os.path.join(out_dir, 'raw'))
            with nl.run_in(tmp_location + '-sorted'):
                for subdir in os.listdir('.'):
                    if hasattr(c, 'dset_ignore_masks'):
                        # skip directories matching any configured ignore mask
                        if any(m in subdir for m in c.dset_ignore_masks):
                            continue
                    nl.notify('creating dataset from %s' % subdir)
                    import_log[padre_dir][subdir] = {}
                    if not nl.dicom.create_dset(subdir, slice_order,
                                                sort_order):
                        import_log[padre_dir][subdir]['error'] = True
                    if os.path.exists(subdir + '.nii.gz'):
                        import_log[padre_dir][subdir]['complete'] = True
                        shutil.move(subdir + '.nii.gz',
                                    os.path.join(out_dir, subdir + '.nii.gz'))
                        session_name = subdir.split('-')[1]
                        if session_name not in dsets_made:
                            dsets_made[session_name] = []
                        dsets_made[session_name].append(
                            os.path.join(out_dir, subdir + '.nii.gz'))
                    if import_log[padre_dir][subdir].get('complete'):
                        if import_log[padre_dir][subdir].get('error'):
                            nl.notify(
                                'created dataset %s, but Dimon returned an error'
                                % (subdir + '.nii.gz'),
                                level=nl.level.error)
                        else:
                            nl.notify('successfully created dataset %s' %
                                      (subdir + '.nii.gz'))
                    else:
                        nl.notify(
                            'failed to create dataset from directory %s' %
                            subdir,
                            level=nl.level.error)
            nl.notify('moving raw data...')
            raw_data = []
            for subdir in os.listdir(tmp_location + '-sorted'):
                out_file = os.path.join(out_dir, 'raw', subdir + '.tgz')
                if not os.path.exists(out_file):
                    try:
                        with tarfile.open(out_file, 'w:gz') as tgz:
                            tgz.add(
                                os.path.join(tmp_location + '-sorted', subdir))
                    except tarfile.TarError:
                        nl.notify(
                            'Error creating compressed raw directory %s' %
                            out_file,
                            level=nl.level.error)
                    else:
                        raw_data.append(subdir + '.tgz')
            for r, ds, fs in os.walk(tmp_location):
                for f in fs:
                    # path of this file relative to the unpacked root
                    rel_dir = os.path.relpath(r, tmp_location)
                    out_file = os.path.join(out_dir, 'raw', 'unsorted',
                                            rel_dir, f)
                    if not os.path.exists(out_file):
                        if not os.path.exists(os.path.dirname(out_file)):
                            os.makedirs(os.path.dirname(out_file))
                        shutil.move(os.path.join(r, f), out_file)
                        raw_data.append(os.path.join('unsorted', rel_dir, f))
            nl.notify('importing into padre...')
            for session in dsets_made:
                p.maint.import_to_padre(subject_guess, session,
                                        dsets_made[session], raw_data, out_dir)
        else:
            nl.notify('didn\'t find any files...')
    finally:
        shutil.rmtree(tmp_dir, True)
    save_log()
    return dsets_made
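A usage sketch (the archive path and subject ID are hypothetical; ``import_location``, ``processed_location``, ``import_log``, and ``c`` are module-level globals assumed to be configured elsewhere):

dsets_made = import_archive('/data/import/subject01.tgz', 'subject01')
for session in dsets_made:
    print('%s: %d datasets imported' % (session, len(dsets_made[session])))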
Example 7
def _create_dset_dicom(directory,
                       slice_order='alt+z',
                       sort_order=None,
                       force_slices=None):
    tags = {
        'num_rows': (0x0028, 0x0010),
        'num_reps': (0x0020, 0x0105),
        'num_frames': (0x0028, 0x0008),
        'acq_time': (0x0008, 0x0032),
        'siemens_slices': (0x0019, 0x100a),
        'TR': (0x0018, 0x0080)
    }
    with nl.notify('Trying to create datasets from %s' % directory):
        directory = os.path.abspath(directory)

        if not os.path.exists(directory):
            nl.notify('Error: could not find %s' % directory,
                      level=nl.level.error)
            return False

        out_file = '%s.nii.gz' % nl.prefix(os.path.basename(directory))
        if os.path.exists(out_file):
            nl.notify('Error: file "%s" already exists!' % out_file,
                      level=nl.level.error)
            return False

        cwd = os.getcwd()
        sorted_dir = tempfile.mkdtemp()
        try:
            with nl.run_in(sorted_dir):
                file_list = glob.glob(directory + '/*')
                num_reps = None

                new_file_list = []
                for f in file_list:
                    try:
                        if len(info_for_tags(f, tags['num_rows'])) > 0:
                            # Only include DICOMs that actually have image information
                            new_file_list.append(f)
                    except Exception:
                        # unreadable or non-DICOM file; skip it
                        pass
                file_list = new_file_list
                if len(file_list) == 0:
                    nl.notify('Error: Couldn\'t find any valid DICOM images',
                              level=nl.level.error)
                    return False

                with open('file_list.txt', 'w') as f:
                    f.write('\n'.join(file_list))
                try:
                    subprocess.check_output([
                        'Dimon', '-infile_list', 'file_list.txt', '-dicom_org',
                        '-save_details', 'details', '-max_images', '100000',
                        '-fast', '-no_wait', '-quit'
                    ],
                                            stderr=subprocess.STDOUT)
                except subprocess.CalledProcessError:
                    nl.notify(
                        'Warning: Dimon returned an error while sorting images',
                        level=nl.level.warning)
                else:
                    if os.path.exists('details.2.final_list.txt'):
                        with open('details.2.final_list.txt') as f:
                            details = [
                                x.strip().split() for x in f
                                if x.strip() and not x.startswith('#')
                            ]
                            file_list = [x[0] for x in details]
                    else:
                        nl.notify(
                            'Warning: Dimon didn\'t return expected output, unable to sort images',
                            level=nl.level.warning)
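A usage sketch (the DICOM directory name is hypothetical); on success the function writes ``<directory>.nii.gz`` into the current directory:

_create_dset_dicom('subj01-sess1-ep2d_bold', slice_order='alt+z', sort_order='zt')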
Example 8
def smooth_decon_to_fwhm(decon,fwhm,cache=True):
    '''takes an input :class:`Decon` object and uses ``3dBlurToFWHM`` to smooth the input datasets as close as possible to ``fwhm``
    before deconvolving. If ``cache`` is ``True``, will save the blurred input files (and use them again in the future)'''
    if os.path.exists(decon.prefix):
        return
    blur_dset = lambda dset: nl.suffix(dset,'_smooth_to_%.2f' % fwhm)

    with nl.notify('Running smooth_decon_to_fwhm analysis (with %.2fmm blur)' % fwhm):
        tmpdir = tempfile.mkdtemp()
        try:
            cwd = os.getcwd()
            # gather every file referenced by the Decon object (strings, lists,
            # and dict values), stripping AFNI sub-brick selectors like "[0]"
            referenced = [x for x in decon.__dict__.values()
                          if isinstance(x, basestring) or isinstance(x, list)]
            referenced += [x.values() for x in decon.__dict__.values()
                           if isinstance(x, dict)]
            random_files = [re.sub(r'\[\d+\]$', '', str(x))
                            for x in nl.flatten(referenced)]
            files_to_copy = [x for x in random_files
                             if os.path.exists(x) and x[0] != '/']
            files_to_copy += [blur_dset(dset) for dset in decon.input_dsets
                              if os.path.exists(blur_dset(dset))]
            # copy the referenced files into the temp dir
            for fname in files_to_copy:
                try:
                    # directories need copytree (which requires a fresh destination)
                    shutil.copytree(fname, os.path.join(tmpdir, os.path.basename(fname)))
                except OSError:
                    # plain file
                    shutil.copy(fname, tmpdir)

            copyback_files = [decon.prefix,decon.errts]
            with nl.run_in(tmpdir):
                if os.path.exists(decon.prefix):
                    os.remove(decon.prefix)

                # Create the blurred inputs (or load from cache)
                if cache and all([os.path.exists(os.path.join(cwd,blur_dset(dset))) for dset in decon.input_dsets]):
                    # Everything is already cached...
                    nl.notify('Using cache\'d blurred datasets')
                else:
                    # Need to make them from scratch
                    with nl.notify('Creating blurred datasets'):
                        old_errts = decon.errts
                        decon.errts = 'residual.nii.gz'
                        decon.prefix = os.path.basename(decon.prefix)
                        # Run once in place to get the residual dataset
                        decon.run()
                        running_reps = 0
                        for dset in decon.input_dsets:
                            info = nl.dset_info(dset)
                            residual_dset = nl.suffix(dset,'_residual')
                            nl.run(['3dbucket','-prefix',residual_dset,'%s[%d..%d]'%(decon.errts,running_reps,running_reps+info.reps-1)],products=residual_dset)
                            cmd = ['3dBlurToFWHM','-quiet','-input',dset,'-blurmaster',residual_dset,'-prefix',blur_dset(dset),'-FWHM',fwhm]
                            if decon.mask:
                                if decon.mask=='auto':
                                    cmd += ['-automask']
                                else:
                                    cmd += ['-mask',decon.mask]
                            nl.run(cmd,products=blur_dset(dset))
                            running_reps += info.reps
                            if cache:
                                copyback_files.append(blur_dset(dset))
                    decon.errts = old_errts
                decon.input_dsets = [blur_dset(dset) for dset in decon.input_dsets]
                for d in [decon.prefix, decon.errts]:
                    if os.path.exists(d):
                        try:
                            os.remove(d)
                        except OSError:
                            pass
                decon.run()
                for copyfile in copyback_files:
                    if os.path.exists(copyfile):
                        shutil.copy(copyfile, cwd)
                    else:
                        nl.notify('Warning: deconvolve did not produce expected file %s' % copyfile,
                                  level=nl.level.warning)
        finally:
            shutil.rmtree(tmpdir,True)
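A usage sketch (assuming ``nl.Decon`` is the :class:`Decon` referenced in the docstring; all attribute values are hypothetical):

decon = nl.Decon()
decon.input_dsets = ['run1.nii.gz', 'run2.nii.gz']
decon.prefix = 'stats_subject01.nii.gz'
decon.errts = 'errts_subject01.nii.gz'
decon.mask = 'auto'
smooth_decon_to_fwhm(decon, 6.0)   # blur inputs toward a 6 mm FWHM, then deconvolve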