def fetch_fsl_feeds_data(data_dir, redownload=False):
    """Fetch the FSL FEEDS dataset into ``data_dir``.

    Downloads the fsl-4.1.0-feeds tarball (if not already present),
    uncompresses it, and globs the resulting files.

    Parameters
    ----------
    data_dir : str
        Directory where the dataset is (or will be) stored.
    redownload : bool, optional
        If True, old downloads are meant to be zapped first.
        NOTE(review): the actual removal calls below are commented out,
        so this flag currently only prints messages -- confirm intent.

    Returns
    -------
    Whatever ``_glob_fsl_feeds_data`` returns for ``data_dir``
    (the globbed subject data), possibly after retrying the download.

    Notes
    -----
    Retries recurse without a bound: if the download keeps producing a
    corrupt archive, this recurses until the recursion limit is hit.
    """
    url = ("http://fsl.fmrib.ox.ac.uk/fsldownloads/oldversions/"
           "fsl-4.1.0-feeds.tar.gz")
    subject_dir = data_dir
    archive_path = os.path.join(subject_dir, os.path.basename(url))

    if redownload:
        try:
            print("Zapping all old downloads ..")
            # shutil.rmtree(subject_dir)
            # os.remove(archive_path)
        except OSError:
            pass
        finally:
            print("Done.")

    if os.path.exists(subject_dir):
        subject_data = _glob_fsl_feeds_data(subject_dir)
        if subject_data is None:
            # Directory exists but is incomplete: retry from scratch.
            # NOTE(review): rmtree is commented out, so if the glob keeps
            # returning None this recursion never terminates -- confirm.
            # shutil.rmtree(subject_dir)
            return fetch_fsl_feeds_data(data_dir)
        else:
            return subject_data
    elif os.path.exists(archive_path):
        try:
            _uncompress_file(archive_path)
        except Exception:  # narrowed from bare except: keep Ctrl-C working
            print("Archive corrupted, trying to download it again.")
            os.remove(archive_path)
            return fetch_fsl_feeds_data(data_dir)
    else:
        _fetch_file(url, data_dir)
        try:
            _uncompress_file(archive_path)
        except Exception:  # narrowed from bare except
            print("Archive corrupted, trying to download it again.")
            os.remove(archive_path)
            return fetch_fsl_feeds_data(data_dir)
    return _glob_fsl_feeds_data(subject_dir)
def fetch_spm_auditory_data(data_dir, redownload=False):
    """Fetch the SPM auditory (MoAEpilot) dataset into ``data_dir``.

    Downloads the MoAEpilot.zip archive (if not already present),
    uncompresses it, and globs the resulting files.

    Parameters
    ----------
    data_dir : str
        Directory where the dataset is (or will be) stored.
    redownload : bool, optional
        If True, old downloads are meant to be zapped first.
        NOTE(review): the actual removal calls below are commented out,
        so this flag currently only prints messages -- confirm intent.

    Returns
    -------
    Whatever ``_glob_spm_auditory_data`` returns for ``data_dir``
    (the globbed subject data), possibly after retrying the download.
    """
    url = "ftp://ftp.fil.ion.ucl.ac.uk/spm/data/MoAEpilot/MoAEpilot.zip"
    subject_dir = data_dir
    archive_path = os.path.join(subject_dir, os.path.basename(url))

    if redownload:
        try:
            print("Zapping all old downloads ..")
            # shutil.rmtree(subject_dir)
            # os.remove(archive_path)
        except OSError:
            pass
        finally:
            print("Done.")

    if os.path.exists(subject_dir):
        subject_data = _glob_spm_auditory_data(subject_dir)
        if subject_data is None:
            # Directory exists but is incomplete: retry from scratch.
            # NOTE(review): rmtree is commented out, so if the glob keeps
            # returning None this recursion never terminates -- confirm.
            # shutil.rmtree(subject_dir)
            return fetch_spm_auditory_data(data_dir)
        else:
            return subject_data
    elif os.path.exists(archive_path):
        try:
            _uncompress_file(archive_path)
        except Exception:  # narrowed from bare except: keep Ctrl-C working
            print("Archive corrupted, trying to download it again.")
            # BUG FIX: without removing the corrupt archive the retry
            # re-finds it and recurses forever; the sibling FSL-FEEDS and
            # Haxby fetchers both remove it here.
            os.remove(archive_path)
            return fetch_spm_auditory_data(data_dir)
    else:
        _fetch_file(url, data_dir)
        try:
            _uncompress_file(archive_path)
        except Exception:  # narrowed from bare except
            print("Archive corrupted, trying to download it again.")
            # BUG FIX: remove the corrupt archive before retrying
            # (consistent with the other fetchers in this file).
            os.remove(archive_path)
            return fetch_spm_auditory_data(data_dir)
    return _glob_spm_auditory_data(subject_dir)
def fetch_haxby_subject_data(data_dir, subject_id, url, redownload=False):
    """Fetch one Haxby subject's data from ``url`` into ``data_dir``.

    Downloads the subject archive (if not already present), uncompresses
    it into ``data_dir/subject_id``, and globs the resulting files.

    Parameters
    ----------
    data_dir : str
        Root directory for Haxby data.
    subject_id : str
        Subject identifier; also the name of the per-subject directory.
    url : str
        URL of the subject's archive.
    redownload : bool, optional
        If True, old downloads are meant to be zapped first.
        NOTE(review): the actual removal calls below are commented out,
        so this flag currently only prints messages -- confirm intent.

    Returns
    -------
    tuple
        ``(subject_id, subject_data)`` where ``subject_data`` is whatever
        ``_glob_haxby_subject_data`` returns for the subject directory.
    """
    archive_name = os.path.basename(url)
    archive_path = os.path.join(data_dir, archive_name)
    subject_dir = os.path.join(data_dir, subject_id)

    if redownload:
        try:
            print("Zapping all old downloads ..")
            # shutil.rmtree(subject_dir)
            # os.remove(archive_path)
        except OSError:
            pass
        finally:
            print("Done.")

    if os.path.exists(subject_dir):
        subject_data = _glob_haxby_subject_data(subject_dir)
        if subject_data is None:
            # Directory exists but is incomplete: retry from scratch.
            # NOTE(review): rmtree is commented out, so if the glob keeps
            # returning None this recursion never terminates -- confirm.
            # shutil.rmtree(subject_dir)
            return fetch_haxby_subject_data(data_dir, subject_id, url)
        else:
            return subject_id, subject_data
    elif os.path.exists(archive_path):
        try:
            _uncompress_file(archive_path)
        except Exception:  # narrowed from bare except: keep Ctrl-C working
            print("Archive corrupted, trying to download it again.")
            os.remove(archive_path)
            return fetch_haxby_subject_data(data_dir, subject_id, url)
    else:
        _fetch_file(url, data_dir)
        try:
            _uncompress_file(archive_path)
        except Exception:  # narrowed from bare except
            print("Archive corrupted, trying to download it again.")
            os.remove(archive_path)
            return fetch_haxby_subject_data(data_dir, subject_id, url)
    return subject_id, _glob_haxby_subject_data(subject_dir)
if __name__ == '__main__':
    # Command-line entry point: download an openfMRI dataset, patch its
    # metadata, then run SPM preprocessing and prepare the GLM.
    if len(sys.argv) < 4:
        print("Usage: python %s <data_root_dir> "
              "<preproc_root_dir> <glm_root_dir>" % sys.argv[0])
        print("Example:\r\npython %s ~/datasets/raw"
              " ~/datasets/preproc ~/datasets/glm" % sys.argv[0])
        sys.exit(1)
    # BUG FIX: slice exactly three values; `sys.argv[1:]` raises
    # ValueError when extra arguments are passed, even though the guard
    # above only requires *at least* three.
    root_dir, preproc_dir, glm_dir = sys.argv[1:4]

    # download data
    data_dir = fetch_openfmri(FULL_ID, root_dir)

    # condition_key file in tarball is incomplete
    _fetch_file('https://openfmri.org/system/files/condition_key.txt',
                os.path.join(data_dir, SHORT_ID, 'models', MODEL_ID))

    # this dataset does not contain contrast definitions
    contrasts_file = '%s_task_contrasts.txt' % SHORT_ID
    # Explicit check instead of `assert`: asserts are stripped under
    # `python -O`, and a missing file here must always abort.
    if not os.path.isfile(contrasts_file):
        raise RuntimeError("No contrasts file: %s" % contrasts_file)
    dest = os.path.join(data_dir, SHORT_ID, 'models', MODEL_ID,
                        'task_contrasts.txt')
    shutil.copy(contrasts_file, dest)

    # apply SPM preprocessing
    apply_preproc(SHORT_ID, data_dir, preproc_dir, ignore_list,
                  dataset_description=DESCRIPTION)

    # prepare GLM (get data and design)