def test_copy_gzip(tmpdir):
    """Check that ``_copy_any`` copies a gzipped file without carrying over its embedded name."""
    src = tmpdir / 'name1.txt'
    dst = tmpdir / 'name2.txt'
    assert not dst.exists()

    # Create an empty file, then gzip it; gzip removes the uncompressed original.
    with open(str(src), 'w'):
        pass
    check_call(['gzip', '-N', str(src)])
    assert not src.exists()

    gzpath1 = '%s/%s' % (tmpdir, 'name1.txt.gz')
    gzpath2 = '%s/%s' % (tmpdir, 'name2.txt.gz')
    _copy_any(gzpath1, gzpath2)
    assert Path(gzpath2).exists()

    # gunzip -N honors the name stored in the gzip header; the copy should
    # decompress to name2.txt, not to the original name1.txt.
    check_call(['gunzip', '-N', '-f', gzpath2])
    assert not src.exists()
    assert dst.exists()
def test_copy_gzip(tmpdir):
    """Check that ``_copy_any`` copies a gzipped file without carrying over its embedded name."""
    filepath = tmpdir / "name1.txt"
    filepath2 = tmpdir / "name2.txt"
    assert not filepath2.exists()
    # Create an empty file and gzip it; gzip removes the uncompressed original.
    open(str(filepath), "w").close()
    check_call(["gzip", "-N", str(filepath)])
    assert not filepath.exists()
    gzpath1 = "%s/%s" % (tmpdir, "name1.txt.gz")
    gzpath2 = "%s/%s" % (tmpdir, "name2.txt.gz")
    _copy_any(gzpath1, gzpath2)
    assert Path(gzpath2).exists()
    # gunzip -N honors the name stored in the gzip header; the copy should
    # decompress to name2.txt, not to the original name1.txt.
    check_call(["gunzip", "-N", "-f", gzpath2])
    assert not filepath.exists()
    assert filepath2.exists()
def test_copy_gzip():
    """Check that ``_copy_any`` copies a gzipped file without carrying over its embedded name."""
    with TemporaryDirectory() as tmpdir:
        tmppath = Path(tmpdir)
        src_txt = tmppath / 'name1.txt'
        dst_txt = tmppath / 'name2.txt'
        assert not dst_txt.exists()

        # Create an empty file, then gzip it; gzip removes the uncompressed original.
        with open(str(src_txt), 'w'):
            pass
        check_call(['gzip', '-N', str(src_txt)])
        assert not src_txt.exists()

        gzpath1 = '%s/%s' % (tmppath, 'name1.txt.gz')
        gzpath2 = '%s/%s' % (tmppath, 'name2.txt.gz')
        _copy_any(gzpath1, gzpath2)
        assert Path(gzpath2).exists()

        # gunzip -N honors the name stored in the gzip header; the copy should
        # decompress to name2.txt, not to the original name1.txt.
        check_call(['gunzip', '-N', '-f', gzpath2])
        assert not src_txt.exists()
        assert dst_txt.exists()
def main():
    """Entry point.

    Parses command-line options, validates the BIDS input and the FreeSurfer
    license, builds the workflow in a subprocess, runs it, and always finishes
    by generating citation boilerplate and subject reports.
    """
    from nipype import logging as nlogging
    from multiprocessing import set_start_method, Process, Manager
    from ..utils.bids import write_derivative_description, validate_input_dir
    set_start_method('forkserver')
    warnings.showwarning = _warn_redirect
    opts = get_parser().parse_args()

    # Detect the execution environment (bare-metal/container) for telemetry
    # and for input-validation messaging.
    exec_env = os.name

    # special variable set in the container
    if os.getenv('IS_DOCKER_8395080871'):
        exec_env = 'singularity'
        cgroup = Path('/proc/1/cgroup')
        if cgroup.exists() and 'docker' in cgroup.read_text():
            exec_env = 'docker'
            if os.getenv('DOCKER_VERSION_8395080871'):
                exec_env = 'fmriprep-docker'

    sentry_sdk = None
    if not opts.notrack:
        # Shadows the module-level None above; only bound when tracking is on.
        import sentry_sdk
        from ..utils.sentry import sentry_setup
        sentry_setup(opts, exec_env)

    if opts.debug:
        print('WARNING: Option --debug is deprecated and has no effect',
              file=sys.stderr)

    # Validate inputs
    if not opts.skip_bids_validation:
        print("Making sure the input data is BIDS compliant (warnings can be ignored in most "
              "cases).")
        validate_input_dir(exec_env, opts.bids_dir, opts.participant_label)

    # FreeSurfer license
    default_license = str(Path(os.getenv('FREESURFER_HOME')) / 'license.txt')
    # Precedence: --fs-license-file, $FS_LICENSE, default_license
    license_file = opts.fs_license_file or Path(
        os.getenv('FS_LICENSE', default_license))
    if not license_file.exists():
        raise RuntimeError("""\
ERROR: a valid license file is required for FreeSurfer to run. fMRIPrep looked for an existing \
license file at several paths, in this order: 1) command line argument ``--fs-license-file``; \
2) ``$FS_LICENSE`` environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. Get it \
(for free) by registering at https://surfer.nmr.mgh.harvard.edu/registration.html""")
    os.environ['FS_LICENSE'] = str(license_file.resolve())

    # Retrieve logging level
    log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG))
    # Set logging
    logger.setLevel(log_level)
    nlogging.getLogger('nipype.workflow').setLevel(log_level)
    nlogging.getLogger('nipype.interface').setLevel(log_level)
    nlogging.getLogger('nipype.utils').setLevel(log_level)

    # Call build_workflow(opts, retval) in a subprocess so the graph-building
    # memory is released when the child exits; results come back via a
    # Manager-proxied dict.
    with Manager() as mgr:
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(opts, retval))
        p.start()
        p.join()

        retcode = p.exitcode or retval.get('return_code', 0)

        bids_dir = Path(retval.get('bids_dir'))
        output_dir = Path(retval.get('output_dir'))
        work_dir = Path(retval.get('work_dir'))
        plugin_settings = retval.get('plugin_settings', None)
        subject_list = retval.get('subject_list', None)
        fmriprep_wf = retval.get('workflow', None)
        run_uuid = retval.get('run_uuid', None)

    if opts.reports_only:
        sys.exit(int(retcode > 0))

    if opts.boilerplate:
        sys.exit(int(retcode > 0))

    if fmriprep_wf and opts.write_graph:
        fmriprep_wf.write_graph(graph2use="colored", format='svg',
                                simple_form=True)

    # A missing workflow is also an error condition.
    retcode = retcode or int(fmriprep_wf is None)
    if retcode != 0:
        sys.exit(retcode)

    # Check workflow for missing commands
    missing = check_deps(fmriprep_wf)
    if missing:
        print("Cannot run fMRIPrep. Missing dependencies:", file=sys.stderr)
        for iface, cmd in missing:
            print("\t{} (Interface: {})".format(cmd, iface))
        sys.exit(2)
    # Clean up master process before running workflow, which may create forks
    gc.collect()

    # Sentry tracking
    if not opts.notrack:
        from ..utils.sentry import start_ping
        start_ping(run_uuid, len(subject_list))

    errno = 1  # Default is error exit unless otherwise set
    try:
        fmriprep_wf.run(**plugin_settings)
    except Exception as e:
        if not opts.notrack:
            from ..utils.sentry import process_crashfile
            # Harvest crash files written by nipype for every subject/run.
            crashfolders = [output_dir / 'fmriprep' / 'sub-{}'.format(s)
                            / 'log' / run_uuid
                            for s in subject_list]
            for crashfolder in crashfolders:
                for crashfile in crashfolder.glob('crash*.*'):
                    process_crashfile(crashfile)

            if "Workflow did not execute cleanly" not in str(e):
                sentry_sdk.capture_exception(e)
        logger.critical('fMRIPrep failed: %s', e)
        raise
    else:
        if opts.run_reconall:
            from templateflow import api
            from niworkflows.utils.misc import _copy_any
            dseg_tsv = str(api.get('fsaverage', suffix='dseg',
                                   extension=['.tsv']))
            _copy_any(dseg_tsv,
                      str(output_dir / 'fmriprep' / 'desc-aseg_dseg.tsv'))
            _copy_any(dseg_tsv,
                      str(output_dir / 'fmriprep' / 'desc-aparcaseg_dseg.tsv'))
        errno = 0
        logger.log(25, 'fMRIPrep finished without errors')
        if not opts.notrack:
            sentry_sdk.capture_message('fMRIPrep finished without errors',
                                       level='info')
    finally:
        # Reports and boilerplate are generated regardless of success/failure.
        from niworkflows.reports import generate_reports
        from subprocess import check_call, CalledProcessError, TimeoutExpired
        from pkg_resources import resource_filename as pkgrf
        from shutil import copyfile

        citation_files = {
            ext: output_dir / 'fmriprep' / 'logs' / ('CITATION.%s' % ext)
            for ext in ('bib', 'tex', 'md', 'html')
        }

        if citation_files['md'].exists():
            # Generate HTML file resolving citations
            cmd = ['pandoc', '-s', '--bibliography',
                   pkgrf('fmriprep', 'data/boilerplate.bib'),
                   '--filter', 'pandoc-citeproc',
                   '--metadata', 'pagetitle="fMRIPrep citation boilerplate"',
                   str(citation_files['md']),
                   '-o', str(citation_files['html'])]
            logger.info('Generating an HTML version of the citation boilerplate...')
            try:
                check_call(cmd, timeout=10)
            except (FileNotFoundError, CalledProcessError, TimeoutExpired):
                # pandoc missing or failing is non-fatal; boilerplate is best-effort.
                logger.warning('Could not generate CITATION.html file:\n%s',
                               ' '.join(cmd))

            # Generate LaTex file resolving citations
            cmd = ['pandoc', '-s', '--bibliography',
                   pkgrf('fmriprep', 'data/boilerplate.bib'),
                   '--natbib', str(citation_files['md']),
                   '-o', str(citation_files['tex'])]
            logger.info('Generating a LaTeX version of the citation boilerplate...')
            try:
                check_call(cmd, timeout=10)
            except (FileNotFoundError, CalledProcessError, TimeoutExpired):
                logger.warning('Could not generate CITATION.tex file:\n%s',
                               ' '.join(cmd))
            else:
                # Only ship the .bib alongside when LaTeX generation succeeded.
                copyfile(pkgrf('fmriprep', 'data/boilerplate.bib'),
                         citation_files['bib'])
        else:
            logger.warning('fMRIPrep could not find the markdown version of '
                           'the citation boilerplate (%s). HTML and LaTeX versions'
                           ' of it will not be available', citation_files['md'])

        # Generate reports phase
        failed_reports = generate_reports(
            subject_list, output_dir, work_dir, run_uuid,
            packagename='fmriprep')
        write_derivative_description(bids_dir, output_dir / 'fmriprep')

        if failed_reports and not opts.notrack:
            sentry_sdk.capture_message(
                'Report generation failed for %d subjects' % failed_reports,
                level='error')
        # Non-zero exit if the run errored or any report failed.
        sys.exit(int((errno + failed_reports) > 0))
def main():
    """Entry point.

    Parses arguments into the global ``config``, builds the dMRIPrep workflow
    in a subprocess (communicating via a TOML config file), runs it, and
    always finishes by generating reports.
    """
    import os
    import sys
    import gc
    from multiprocessing import Process, Manager
    from .parser import parse_args
    from ..utils.bids import write_derivative_description

    parse_args()

    popylar = None
    if not config.execution.notrack:
        # Shadows the None above; only bound when tracking is enabled.
        import popylar
        from ..__about__ import __ga_id__
        # NOTE(review): the extra string arguments are passed as lazy %-format
        # args to the logger — presumably intended as one concatenated message;
        # verify against the logging call signature.
        config.loggers.cli.info(
            "Your usage of dmriprep is being recorded using popylar (https://popylar.github.io/). ",  # noqa
            "For details, see https://nipreps.github.io/dmriprep/usage.html. ",
            "To opt out, call dmriprep with a `--notrack` flag")
        popylar.track_event(__ga_id__, 'run', 'cli_run')

    # CRITICAL Save the config to a file. This is necessary because the execution graph
    # is built as a separate process to keep the memory footprint low. The most
    # straightforward way to communicate with the child process is via the filesystem.
    config_file = config.execution.work_dir / '.dmriprep.toml'
    config.to_filename(config_file)

    # CRITICAL Call build_workflow(config_file, retval) in a subprocess.
    # Because Python on Linux does not ever free virtual memory (VM), running the
    # workflow construction jailed within a process preempts excessive VM buildup.
    with Manager() as mgr:
        from .workflow import build_workflow
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(str(config_file), retval))
        p.start()
        p.join()

        retcode = p.exitcode or retval.get('return_code', 0)
        dmriprep_wf = retval.get('workflow', None)

    # CRITICAL Load the config from the file. This is necessary because the ``build_workflow``
    # function executed constrained in a process may change the config (and thus the global
    # state of dMRIPrep).
    config.load(config_file)

    if config.execution.reports_only:
        sys.exit(int(retcode > 0))

    if dmriprep_wf and config.execution.write_graph:
        dmriprep_wf.write_graph(graph2use="colored", format='svg',
                                simple_form=True)

    # Missing workflow maps to EX_SOFTWARE (internal software error).
    retcode = retcode or (dmriprep_wf is None) * os.EX_SOFTWARE
    if retcode != 0:
        sys.exit(retcode)

    # Generate boilerplate in a subprocess for the same memory reasons.
    with Manager() as mgr:
        from .workflow import build_boilerplate
        p = Process(target=build_boilerplate,
                    args=(str(config_file), dmriprep_wf))
        p.start()
        p.join()

    if config.execution.boilerplate_only:
        sys.exit(int(retcode > 0))

    # Clean up master process before running workflow, which may create forks
    gc.collect()

    if popylar is not None:
        popylar.track_event(__ga_id__, 'run', 'started')

    config.loggers.workflow.log(
        15, '\n'.join(['dMRIPrep config:'] + [
            '\t\t%s' % s for s in config.dumps().splitlines()]))
    config.loggers.workflow.log(25, 'dMRIPrep started!')
    errno = 1  # Default is error exit unless otherwise set
    try:
        dmriprep_wf.run(**config.nipype.get_plugin())
    except Exception as e:
        if not config.execution.notrack:
            popylar.track_event(__ga_id__, 'run', 'error')
        config.loggers.workflow.critical('dMRIPrep failed: %s', e)
        raise
    else:
        config.loggers.workflow.log(25, 'dMRIPrep finished successfully!')

        # Bother users with the boilerplate only iff the workflow went okay.
        if (config.execution.output_dir / 'dmriprep' / 'logs'
                / 'CITATION.md').exists():
            config.loggers.workflow.log(
                25, 'Works derived from this dMRIPrep execution should '
                'include the following boilerplate:\n\n%s',
                (config.execution.output_dir / 'dmriprep' / 'logs'
                 / 'CITATION.md').read_text())

        if config.workflow.run_reconall:
            from templateflow import api
            from niworkflows.utils.misc import _copy_any
            # Ship fsaverage segmentation lookup tables with the derivatives.
            dseg_tsv = str(api.get('fsaverage', suffix='dseg',
                                   extension=['.tsv']))
            _copy_any(
                dseg_tsv,
                str(config.execution.output_dir / 'dmriprep'
                    / 'desc-aseg_dseg.tsv'))
            _copy_any(
                dseg_tsv,
                str(config.execution.output_dir / 'dmriprep'
                    / 'desc-aparcaseg_dseg.tsv'))
        errno = 0
    finally:
        # Reports are generated regardless of success/failure.
        from niworkflows.reports import generate_reports
        from pkg_resources import resource_filename as pkgrf

        # Generate reports phase
        failed_reports = generate_reports(
            config.execution.participant_label,
            config.execution.output_dir,
            config.execution.work_dir,
            config.execution.run_uuid,
            config=pkgrf('dmriprep', 'config/reports-spec.yml'),
            packagename='dmriprep')
        write_derivative_description(
            config.execution.bids_dir,
            config.execution.output_dir / 'dmriprep')

        if failed_reports and not config.execution.notrack:
            popylar.track_event(__ga_id__, 'run', 'reporting_error')
        sys.exit(int((errno + failed_reports) > 0))
def build_opts(opts):
    """Trigger a new process that builds the workflow graph, based on the input options.

    Validates the FreeSurfer license, configures logging, builds the sMRIPrep
    workflow in a subprocess, runs it, and always finishes by generating
    reports before exiting with an aggregate status code.
    """
    import os
    from pathlib import Path
    import logging
    import sys
    import gc
    import warnings
    from multiprocessing import set_start_method, Process, Manager
    from nipype import logging as nlogging
    from niworkflows.utils.misc import check_valid_fs_license

    set_start_method('forkserver')

    logging.addLevelName(25, 'IMPORTANT')  # Add a new level between INFO and WARNING
    logging.addLevelName(15, 'VERBOSE')  # Add a new level between INFO and DEBUG
    logger = logging.getLogger('cli')

    def _warn_redirect(message, category, filename, lineno, file=None,
                       line=None):
        # Route Python warnings through the CLI logger instead of stderr.
        logger.warning('Captured warning (%s): %s', category, message)

    warnings.showwarning = _warn_redirect

    # Precedence: --fs-license-file, $FS_LICENSE, default_license
    if opts.fs_license_file is not None:
        os.environ["FS_LICENSE"] = os.path.abspath(opts.fs_license_file)

    if not check_valid_fs_license():
        raise RuntimeError(
            'ERROR: a valid license file is required for FreeSurfer to run. '
            'sMRIPrep looked for an existing license file at several paths, in this '
            'order: 1) command line argument ``--fs-license-file``; 2) ``$FS_LICENSE`` '
            'environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. '
            'Get it (for free) by registering at https://'
            'surfer.nmr.mgh.harvard.edu/registration.html')

    # Retrieve logging level
    log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG))
    # Set logging
    logger.setLevel(log_level)
    nlogging.getLogger('nipype.workflow').setLevel(log_level)
    nlogging.getLogger('nipype.interface').setLevel(log_level)
    nlogging.getLogger('nipype.utils').setLevel(log_level)

    errno = 0

    # Call build_workflow(opts, retval) in a subprocess so graph-building
    # memory is released when the child exits.
    with Manager() as mgr:
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(opts, retval))
        p.start()
        p.join()

        if p.exitcode != 0:
            sys.exit(p.exitcode)

        smriprep_wf = retval['workflow']
        plugin_settings = retval['plugin_settings']
        bids_dir = retval['bids_dir']
        output_dir = retval['output_dir']
        subject_list = retval['subject_list']
        run_uuid = retval['run_uuid']
        retcode = retval['return_code']

    if smriprep_wf is None:
        sys.exit(1)

    if opts.write_graph:
        smriprep_wf.write_graph(graph2use="colored", format='svg',
                                simple_form=True)

    if opts.reports_only:
        sys.exit(int(retcode > 0))

    if opts.boilerplate:
        sys.exit(int(retcode > 0))

    # Check workflow for missing commands
    missing = check_deps(smriprep_wf)
    if missing:
        print("Cannot run sMRIPrep. Missing dependencies:")
        for iface, cmd in missing:
            print("\t{} (Interface: {})".format(cmd, iface))
        sys.exit(2)

    # Clean up master process before running workflow, which may create forks
    gc.collect()
    try:
        smriprep_wf.run(**plugin_settings)
    except RuntimeError:
        # Workflow errors are reflected in the exit code, not re-raised,
        # so reports can still be generated in ``finally``.
        errno = 1
    else:
        if opts.run_reconall:
            from templateflow import api
            from niworkflows.utils.misc import _copy_any
            # Ship fsaverage segmentation lookup tables with the derivatives.
            dseg_tsv = str(api.get('fsaverage', suffix='dseg',
                                   extension=['.tsv']))
            _copy_any(dseg_tsv,
                      str(Path(output_dir) / 'smriprep' / 'desc-aseg_dseg.tsv'))
            _copy_any(dseg_tsv,
                      str(Path(output_dir) / 'smriprep'
                          / 'desc-aparcaseg_dseg.tsv'))
        logger.log(25, 'sMRIPrep finished without errors')
    finally:
        from niworkflows.reports import generate_reports
        from ..utils.bids import write_derivative_description

        logger.log(25, 'Writing reports for participants: %s',
                   ', '.join(subject_list))
        # Generate reports phase; failed report count is added to errno.
        errno += generate_reports(subject_list, output_dir, run_uuid,
                                  packagename='smriprep')
        write_derivative_description(bids_dir,
                                     str(Path(output_dir) / 'smriprep'))
    sys.exit(int(errno > 0))
def main():
    """Entry point.

    Parses arguments into the global ``config``, builds the fMRIPrep workflow
    in a subprocess (communicating via a TOML config file), runs it, and
    always finishes by generating reports.
    """
    import os
    import sys
    import gc
    from multiprocessing import Process, Manager
    from .parser import parse_args
    from ..utils.bids import write_derivative_description

    parse_args()

    sentry_sdk = None
    if not config.execution.notrack:
        # Shadows the None above; only bound when tracking is enabled.
        import sentry_sdk
        from ..utils.sentry import sentry_setup
        sentry_setup()

    # CRITICAL Save the config to a file. This is necessary because the execution graph
    # is built as a separate process to keep the memory footprint low. The most
    # straightforward way to communicate with the child process is via the filesystem.
    config_file = config.execution.work_dir / '.fmriprep.toml'
    config.to_filename(config_file)

    # CRITICAL Call build_workflow(config_file, retval) in a subprocess.
    # Because Python on Linux does not ever free virtual memory (VM), running the
    # workflow construction jailed within a process preempts excessive VM buildup.
    with Manager() as mgr:
        from .workflow import build_workflow
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(str(config_file), retval))
        p.start()
        p.join()

        retcode = p.exitcode or retval.get('return_code', 0)
        fmriprep_wf = retval.get('workflow', None)

    # CRITICAL Load the config from the file. This is necessary because the ``build_workflow``
    # function executed constrained in a process may change the config (and thus the global
    # state of fMRIPrep).
    config.load(config_file)

    if config.execution.reports_only:
        sys.exit(int(retcode > 0))

    if fmriprep_wf and config.execution.write_graph:
        fmriprep_wf.write_graph(graph2use="colored", format='svg',
                                simple_form=True)

    # Missing workflow maps to EX_SOFTWARE (internal software error).
    retcode = retcode or (fmriprep_wf is None) * os.EX_SOFTWARE
    if retcode != 0:
        sys.exit(retcode)

    # Generate boilerplate in a subprocess for the same memory reasons.
    with Manager() as mgr:
        from .workflow import build_boilerplate
        p = Process(target=build_boilerplate,
                    args=(str(config_file), fmriprep_wf))
        p.start()
        p.join()

    if config.execution.boilerplate_only:
        sys.exit(int(retcode > 0))

    # Clean up master process before running workflow, which may create forks
    gc.collect()

    # Sentry tracking
    if sentry_sdk is not None:
        with sentry_sdk.configure_scope() as scope:
            scope.set_tag('run_uuid', config.execution.run_uuid)
            scope.set_tag('npart', len(config.execution.participant_label))
        sentry_sdk.add_breadcrumb(message='fMRIPrep started', level='info')
        sentry_sdk.capture_message('fMRIPrep started', level='info')

    config.loggers.workflow.log(
        15, '\n'.join(['fMRIPrep config:'] + [
            '\t\t%s' % s for s in config.dumps().splitlines()]))
    config.loggers.workflow.log(25, 'fMRIPrep started!')
    errno = 1  # Default is error exit unless otherwise set
    try:
        fmriprep_wf.run(**config.nipype.get_plugin())
    except Exception as e:
        if not config.execution.notrack:
            from ..utils.sentry import process_crashfile
            # Harvest crash files written by nipype for every subject/run.
            crashfolders = [
                config.execution.output_dir / 'fmriprep'
                / 'sub-{}'.format(s) / 'log' / config.execution.run_uuid
                for s in config.execution.participant_label]
            for crashfolder in crashfolders:
                for crashfile in crashfolder.glob('crash*.*'):
                    process_crashfile(crashfile)

            if "Workflow did not execute cleanly" not in str(e):
                sentry_sdk.capture_exception(e)
        config.loggers.workflow.critical('fMRIPrep failed: %s', e)
        raise
    else:
        config.loggers.workflow.log(25, 'fMRIPrep finished successfully!')
        if not config.execution.notrack:
            success_message = 'fMRIPrep finished without errors'
            sentry_sdk.add_breadcrumb(message=success_message, level='info')
            sentry_sdk.capture_message(success_message, level='info')

        # Bother users with the boilerplate only iff the workflow went okay.
        if (config.execution.output_dir / 'fmriprep' / 'logs'
                / 'CITATION.md').exists():
            config.loggers.workflow.log(
                25, 'Works derived from this fMRIPrep execution should '
                'include the following boilerplate:\n\n%s',
                (config.execution.output_dir / 'fmriprep' / 'logs'
                 / 'CITATION.md').read_text())

        if config.workflow.run_reconall:
            from templateflow import api
            from niworkflows.utils.misc import _copy_any
            # Ship fsaverage segmentation lookup tables with the derivatives.
            dseg_tsv = str(api.get('fsaverage', suffix='dseg',
                                   extension=['.tsv']))
            _copy_any(
                dseg_tsv,
                str(config.execution.output_dir / 'fmriprep'
                    / 'desc-aseg_dseg.tsv'))
            _copy_any(
                dseg_tsv,
                str(config.execution.output_dir / 'fmriprep'
                    / 'desc-aparcaseg_dseg.tsv'))
        errno = 0
    finally:
        # Reports are generated regardless of success/failure.
        from niworkflows.reports import generate_reports
        from pkg_resources import resource_filename as pkgrf

        # Generate reports phase
        failed_reports = generate_reports(
            config.execution.participant_label,
            config.execution.output_dir,
            config.execution.work_dir,
            config.execution.run_uuid,
            config=pkgrf('fmriprep', 'data/reports-spec.yml'),
            packagename='fmriprep')
        write_derivative_description(
            config.execution.bids_dir,
            config.execution.output_dir / 'fmriprep')

        if failed_reports and not config.execution.notrack:
            sentry_sdk.capture_message(
                'Report generation failed for %d subjects' % failed_reports,
                level='error')
        sys.exit(int((errno + failed_reports) > 0))
def main():
    """Entry point.

    Parses arguments into the global ``config``, builds the fMRIPrep workflow
    in a subprocess (communicating via a per-run TOML config file), runs it,
    and always finishes by generating reports.
    """
    from os import EX_SOFTWARE
    from pathlib import Path
    import sys
    import gc
    from multiprocessing import Process, Manager
    from .parser import parse_args
    from ..utils.bids import write_derivative_description

    parse_args()

    sentry_sdk = None
    if not config.execution.notrack:
        # Shadows the None above; only bound when tracking is enabled.
        import sentry_sdk
        from ..utils.sentry import sentry_setup
        sentry_setup()

    # CRITICAL Save the config to a file. This is necessary because the execution graph
    # is built as a separate process to keep the memory footprint low. The most
    # straightforward way to communicate with the child process is via the filesystem.
    config_file = config.execution.work_dir / f"config-{config.execution.run_uuid}.toml"
    config.to_filename(config_file)

    # CRITICAL Call build_workflow(config_file, retval) in a subprocess.
    # Because Python on Linux does not ever free virtual memory (VM), running the
    # workflow construction jailed within a process preempts excessive VM buildup.
    with Manager() as mgr:
        from .workflow import build_workflow
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(str(config_file), retval))
        p.start()
        p.join()

        retcode = p.exitcode or retval.get("return_code", 0)
        fmriprep_wf = retval.get("workflow", None)

    # CRITICAL Load the config from the file. This is necessary because the ``build_workflow``
    # function executed constrained in a process may change the config (and thus the global
    # state of fMRIPrep).
    config.load(config_file)

    if config.execution.reports_only:
        sys.exit(int(retcode > 0))

    if fmriprep_wf and config.execution.write_graph:
        fmriprep_wf.write_graph(graph2use="colored", format="svg",
                                simple_form=True)

    # Missing workflow maps to EX_SOFTWARE (internal software error).
    retcode = retcode or (fmriprep_wf is None) * EX_SOFTWARE
    if retcode != 0:
        sys.exit(retcode)

    # Generate boilerplate in a subprocess for the same memory reasons.
    with Manager() as mgr:
        from .workflow import build_boilerplate
        p = Process(target=build_boilerplate,
                    args=(str(config_file), fmriprep_wf))
        p.start()
        p.join()

    if config.execution.boilerplate_only:
        sys.exit(int(retcode > 0))

    # Clean up master process before running workflow, which may create forks
    gc.collect()

    # Sentry tracking
    if sentry_sdk is not None:
        with sentry_sdk.configure_scope() as scope:
            scope.set_tag("run_uuid", config.execution.run_uuid)
            scope.set_tag("npart", len(config.execution.participant_label))
        sentry_sdk.add_breadcrumb(message="fMRIPrep started", level="info")
        sentry_sdk.capture_message("fMRIPrep started", level="info")

    config.loggers.workflow.log(
        15,
        "\n".join(["fMRIPrep config:"] + [
            "\t\t%s" % s for s in config.dumps().splitlines()]),
    )
    config.loggers.workflow.log(25, "fMRIPrep started!")
    errno = 1  # Default is error exit unless otherwise set
    try:
        fmriprep_wf.run(**config.nipype.get_plugin())
    except Exception as e:
        if not config.execution.notrack:
            from ..utils.sentry import process_crashfile
            # Harvest crash files written by nipype for every subject/run.
            crashfolders = [
                config.execution.output_dir / "fmriprep"
                / "sub-{}".format(s) / "log" / config.execution.run_uuid
                for s in config.execution.participant_label
            ]
            for crashfolder in crashfolders:
                for crashfile in crashfolder.glob("crash*.*"):
                    process_crashfile(crashfile)

            if "Workflow did not execute cleanly" not in str(e):
                sentry_sdk.capture_exception(e)
        config.loggers.workflow.critical("fMRIPrep failed: %s", e)
        raise
    else:
        config.loggers.workflow.log(25, "fMRIPrep finished successfully!")
        if not config.execution.notrack:
            success_message = "fMRIPrep finished without errors"
            sentry_sdk.add_breadcrumb(message=success_message, level="info")
            sentry_sdk.capture_message(success_message, level="info")

        # Bother users with the boilerplate only iff the workflow went okay.
        boiler_file = (config.execution.output_dir / "fmriprep" / "logs"
                       / "CITATION.md")
        if boiler_file.exists():
            if config.environment.exec_env in (
                "singularity",
                "docker",
                "fmriprep-docker",
            ):
                # Inside a container the host path is unknown; show a
                # placeholder root instead of the in-container path.
                boiler_file = Path("<OUTPUT_PATH>") / boiler_file.relative_to(
                    config.execution.output_dir)
            config.loggers.workflow.log(
                25,
                "Works derived from this fMRIPrep execution should include the "
                f"boilerplate text found in {boiler_file}.",
            )

        if config.workflow.run_reconall:
            from templateflow import api
            from niworkflows.utils.misc import _copy_any
            # Ship fsaverage segmentation lookup tables with the derivatives.
            dseg_tsv = str(
                api.get("fsaverage", suffix="dseg", extension=[".tsv"]))
            _copy_any(
                dseg_tsv,
                str(config.execution.output_dir / "fmriprep"
                    / "desc-aseg_dseg.tsv"),
            )
            _copy_any(
                dseg_tsv,
                str(config.execution.output_dir / "fmriprep"
                    / "desc-aparcaseg_dseg.tsv"),
            )
        errno = 0
    finally:
        # Reports are generated regardless of success/failure.
        from niworkflows.reports import generate_reports
        from pkg_resources import resource_filename as pkgrf

        # Generate reports phase
        failed_reports = generate_reports(
            config.execution.participant_label,
            config.execution.output_dir,
            config.execution.run_uuid,
            config=pkgrf("fmriprep", "data/reports-spec.yml"),
            packagename="fmriprep",
        )
        write_derivative_description(config.execution.bids_dir,
                                     config.execution.output_dir / "fmriprep")
        if failed_reports and not config.execution.notrack:
            sentry_sdk.capture_message(
                "Report generation failed for %d subjects" % failed_reports,
                level="error",
            )
        sys.exit(int((errno + failed_reports) > 0))
def _run_interface(self, runtime):
    """Copy input files into a BIDS-derivatives tree, naming them from entities.

    Builds the output filename from the source file's BIDS entities plus the
    interface's ``space``/``desc``/``suffix`` (and other allowed entities),
    copies each input file there, optionally normalizes NIfTI header codes
    and units, and writes a JSON sidecar when a single file is produced.
    """
    if isdefined(self.inputs.meta_dict):
        meta = self.inputs.meta_dict
        # inputs passed in construction take priority
        meta.update(self._metadata)
        self._metadata = meta

    # Split source name into base and trailing suffix (e.g. '_bold');
    # output extension mirrors the first input file, adjusted for compression.
    src_fname, _ = _splitext(self.inputs.source_file)
    src_fname, dtype = src_fname.rsplit('_', 1)
    _, ext = _splitext(self.inputs.in_file[0])
    if self.inputs.compress is True and not ext.endswith('.gz'):
        ext += '.gz'
    elif self.inputs.compress is False and ext.endswith('.gz'):
        ext = ext[:-3]

    m = BIDS_NAME.search(src_fname)

    # Modality folder (e.g. 'anat', 'func') taken from the source file's parent.
    mod = Path(self.inputs.source_file).parent.name

    base_directory = runtime.cwd
    if isdefined(self.inputs.base_directory):
        base_directory = self.inputs.base_directory
    base_directory = Path(base_directory).resolve()

    # <base>/<out_path_base>/sub-XX[/ses-YY]/<mod>/
    out_path = base_directory / self.out_path_base / \
        '{subject_id}'.format(**m.groupdict())
    if m.groupdict().get('session_id') is not None:
        out_path = out_path / '{session_id}'.format(**m.groupdict())
    out_path = out_path / '{}'.format(mod)

    out_path.mkdir(exist_ok=True, parents=True)
    base_fname = str(out_path / src_fname)

    # Collect '_key-value' fragments for every defined allowed entity,
    # preserving the order declared in self._allowed_entities.
    allowed_entities = {}
    for key in self._allowed_entities:
        value = getattr(self.inputs, key)
        if value is not None and isdefined(value):
            allowed_entities[key] = '_%s-%s' % (key, value)

    formatbase = '{bname}{space}{desc}' + ''.join(
        [allowed_entities.get(s, '') for s in self._allowed_entities])

    formatstr = formatbase + '{extra}{suffix}{dtype}{ext}'
    if len(self.inputs.in_file) > 1 and not isdefined(
            self.inputs.extra_values):
        # Multiple inputs without explicit extra values: number them 0000, 0001, ...
        formatstr = formatbase + '{suffix}{i:04d}{dtype}{ext}'

    space = '_space-{}'.format(self.inputs.space) if self.inputs.space else ''
    desc = '_desc-{}'.format(self.inputs.desc) if self.inputs.desc else ''
    suffix = '_{}'.format(self.inputs.suffix) if self.inputs.suffix else ''
    # Reuses the trailing suffix split off src_fname above (e.g. '_bold').
    dtype = '' if not self.inputs.keep_dtype else ('_%s' % dtype)

    self._results['compression'] = []
    self._results['fixed_hdr'] = [False] * len(self.inputs.in_file)

    for i, fname in enumerate(self.inputs.in_file):
        extra = ''
        if isdefined(self.inputs.extra_values):
            extra = '_{}'.format(self.inputs.extra_values[i])
        out_file = formatstr.format(
            bname=base_fname,
            space=space,
            desc=desc,
            extra=extra,
            suffix=suffix,
            i=i,
            dtype=dtype,
            ext=ext,
        )
        # NOTE(review): assumes self._results['out_file'] was initialized
        # (e.g. to []) before this method runs — confirm in the class setup.
        self._results['out_file'].append(out_file)
        # _copy_any reports whether the copy involved (de)compression.
        self._results['compression'].append(_copy_any(fname, out_file))

        is_nii = out_file.endswith('.nii') or out_file.endswith('.nii.gz')
        if self.inputs.check_hdr and is_nii:
            nii = nb.load(out_file)
            if not isinstance(nii, (nb.Nifti1Image, nb.Nifti2Image)):
                # .dtseries.nii are CIfTI2, therefore skip check
                return runtime
            hdr = nii.header.copy()
            curr_units = tuple([None if u == 'unknown' else u
                                for u in hdr.get_xyzt_units()])
            curr_codes = (int(hdr['qform_code']), int(hdr['sform_code']))

            # Default to mm, use sec if data type is bold
            units = (curr_units[0] or 'mm',
                     'sec' if dtype == '_bold' else None)
            xcodes = (1, 1)  # Derivative in its original scanner space
            if self.inputs.space:
                from templateflow.api import templates as _get_template_list
                STANDARD_SPACES = _get_template_list()
                # 4 = template space (MNI et al.); 2 = aligned to some other file.
                xcodes = (4, 4) if self.inputs.space in STANDARD_SPACES \
                    else (2, 2)

            if curr_codes != xcodes or curr_units != units:
                self._results['fixed_hdr'][i] = True
                hdr.set_qform(nii.affine, xcodes[0])
                hdr.set_sform(nii.affine, xcodes[1])
                hdr.set_xyzt_units(*units)

                # Rewrite file with new header
                nii.__class__(np.array(nii.dataobj), nii.affine,
                              hdr).to_filename(out_file)

    if len(self._results['out_file']) == 1:
        # Single output: also write a JSON sidecar with the merged metadata
        # plus any copyable (non-static) input traits.
        meta_fields = self.inputs.copyable_trait_names()
        self._metadata.update({
            k: getattr(self.inputs, k)
            for k in meta_fields if k not in self._static_traits
        })
        if self._metadata:
            sidecar = (Path(self._results['out_file'][0]).parent
                       / ('%s.json' % _splitext(
                           self._results['out_file'][0])[0]))
            sidecar.write_text(
                dumps(self._metadata, sort_keys=True, indent=2))
            self._results['out_meta'] = str(sidecar)
    return runtime
def main():
    """Entry point of the fMRIPrep CLI.

    Parses arguments, optionally sets up Sentry telemetry, validates the
    BIDS input directory and the FreeSurfer license, builds the workflow
    in a subprocess (to keep the master process clean for forking), runs
    it, and finally generates reports and the derivatives description.
    Exits via ``sys.exit`` with a non-zero status on any failure.
    """
    from nipype import logging as nlogging
    from multiprocessing import set_start_method, Process, Manager
    from ..viz.reports import generate_reports
    # BUG FIX: ``validate_input_dir`` is called below but was not imported
    from ..utils.bids import write_derivative_description, validate_input_dir
    set_start_method('forkserver')
    warnings.showwarning = _warn_redirect
    opts = get_parser().parse_args()
    exec_env = os.name

    # special variable set in the container
    if os.getenv('IS_DOCKER_8395080871'):
        exec_env = 'singularity'
        cgroup = Path('/proc/1/cgroup')
        if cgroup.exists() and 'docker' in cgroup.read_text():
            exec_env = 'docker'
            if os.getenv('DOCKER_VERSION_8395080871'):
                exec_env = 'fmriprep-docker'

    sentry_sdk = None
    if not opts.notrack:
        import sentry_sdk
        from ..__about__ import __version__
        environment = "prod"
        release = __version__
        if not __version__:
            environment = "dev"
            release = "dev"
        elif bool(int(os.getenv('FMRIPREP_DEV', 0))) or ('+' in __version__):
            environment = "dev"

        def before_send(event, hints):
            # Filtering log messages about crashed nodes
            if 'logentry' in event and 'message' in event['logentry']:
                msg = event['logentry']['message']
                if msg.startswith("could not run node:"):
                    return None
                elif msg.startswith("Saving crash info to "):
                    return None
                elif re.match("Node .+ failed to run on host .+", msg):
                    return None

            if 'breadcrumbs' in event and isinstance(event['breadcrumbs'], list):
                fingerprints_to_propagate = ['no-disk-space', 'memory-error',
                                             'permission-denied',
                                             'keyboard-interrupt']
                for bc in event['breadcrumbs']:
                    msg = bc.get('message', 'empty-msg')
                    if msg in fingerprints_to_propagate:
                        event['fingerprint'] = [msg]
                        break
            return event

        sentry_sdk.init("https://[email protected]/1137693",
                        release=release,
                        environment=environment,
                        before_send=before_send)
        with sentry_sdk.configure_scope() as scope:
            scope.set_tag('exec_env', exec_env)
            if exec_env == 'fmriprep-docker':
                scope.set_tag('docker_version',
                              os.getenv('DOCKER_VERSION_8395080871'))
            dset_desc_path = opts.bids_dir / 'dataset_description.json'
            if dset_desc_path.exists():
                desc_content = dset_desc_path.read_bytes()
                scope.set_tag('dset_desc_sha256',
                              hashlib.sha256(desc_content).hexdigest())
            free_mem_at_start = round(psutil.virtual_memory().free / 1024**3, 1)
            scope.set_tag('free_mem_at_start', free_mem_at_start)
            scope.set_tag('cpu_count', cpu_count())

            # Memory policy may have a large effect on types of errors experienced
            overcommit_memory = Path('/proc/sys/vm/overcommit_memory')
            if overcommit_memory.exists():
                policy = {'0': 'heuristic',
                          '1': 'always',
                          '2': 'never'}.get(
                              overcommit_memory.read_text().strip(), 'unknown')
                scope.set_tag('overcommit_memory', policy)
                if policy == 'never':
                    overcommit_kbytes = Path('/proc/sys/vm/overcommit_memory')
                    kb = overcommit_kbytes.read_text().strip()
                    if kb != '0':
                        limit = '{}kB'.format(kb)
                    else:
                        overcommit_ratio = Path('/proc/sys/vm/overcommit_ratio')
                        limit = '{}%'.format(
                            overcommit_ratio.read_text().strip())
                    scope.set_tag('overcommit_limit', limit)
                else:
                    scope.set_tag('overcommit_limit', 'n/a')
            else:
                scope.set_tag('overcommit_memory', 'n/a')
                scope.set_tag('overcommit_limit', 'n/a')

            for k, v in vars(opts).items():
                scope.set_tag(k, v)

    # Validate inputs
    if not opts.skip_bids_validation:
        print("Making sure the input data is BIDS compliant (warnings can be ignored in most "
              "cases).")
        validate_input_dir(exec_env, str(opts.bids_dir), opts.participant_label)

    # FreeSurfer license
    default_license = str(Path(os.getenv('FREESURFER_HOME')) / 'license.txt')
    # Precedence: --fs-license-file, $FS_LICENSE, default_license
    license_file = opts.fs_license_file or os.getenv('FS_LICENSE',
                                                     default_license)
    if not os.path.exists(license_file):
        raise RuntimeError(
            'ERROR: a valid license file is required for FreeSurfer to run. '
            'FMRIPREP looked for an existing license file at several paths, in this '
            'order: 1) command line argument ``--fs-license-file``; 2) ``$FS_LICENSE`` '
            'environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. '
            'Get it (for free) by registering at https://'
            'surfer.nmr.mgh.harvard.edu/registration.html')
    os.environ['FS_LICENSE'] = license_file

    # Retrieve logging level
    log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG))
    # Set logging
    logger.setLevel(log_level)
    nlogging.getLogger('nipype.workflow').setLevel(log_level)
    nlogging.getLogger('nipype.interface').setLevel(log_level)
    nlogging.getLogger('nipype.utils').setLevel(log_level)

    errno = 0

    # Call build_workflow(opts, retval) in a subprocess so heavy imports
    # do not pollute the master process before it forks workers
    with Manager() as mgr:
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(opts, retval))
        p.start()
        p.join()

        retcode = p.exitcode or retval.get('return_code', 0)

        bids_dir = retval.get('bids_dir')
        output_dir = retval.get('output_dir')
        work_dir = retval.get('work_dir')
        plugin_settings = retval.get('plugin_settings', None)
        subject_list = retval.get('subject_list', None)
        fmriprep_wf = retval.get('workflow', None)
        run_uuid = retval.get('run_uuid', None)

    if opts.reports_only:
        sys.exit(int(retcode > 0))

    if opts.boilerplate:
        sys.exit(int(retcode > 0))

    if fmriprep_wf and opts.write_graph:
        fmriprep_wf.write_graph(graph2use="colored", format='svg',
                                simple_form=True)

    retcode = retcode or int(fmriprep_wf is None)
    if retcode != 0:
        sys.exit(retcode)

    # Check workflow for missing commands
    missing = check_deps(fmriprep_wf)
    if missing:
        print("Cannot run fMRIPrep. Missing dependencies:")
        for iface, cmd in missing:
            print("\t{} (Interface: {})".format(cmd, iface))
        sys.exit(2)
    # Clean up master process before running workflow, which may create forks
    gc.collect()

    # Sentry tracking
    if not opts.notrack:
        with sentry_sdk.configure_scope() as scope:
            if run_uuid:
                scope.set_tag('run_uuid', run_uuid)
            if subject_list:
                scope.set_tag('npart', len(subject_list))
        sentry_sdk.add_breadcrumb(message='fMRIPrep started', level='info')
        sentry_sdk.capture_message('fMRIPrep started', level='info')

    try:
        fmriprep_wf.run(**plugin_settings)
    except RuntimeError as e:
        errno = 1
        # BUG FIX: guard on sentry_sdk — it is None when --notrack is set,
        # and the unguarded call raised AttributeError, masking the error
        if sentry_sdk and "Workflow did not execute cleanly" not in str(e):
            sentry_sdk.capture_exception(e)
        raise
    else:
        if opts.run_reconall:
            from templateflow import api
            from niworkflows.utils.misc import _copy_any
            dseg_tsv = str(api.get('fsaverage', suffix='dseg',
                                   extensions=['.tsv']))
            _copy_any(dseg_tsv,
                      str(Path(output_dir) / 'fmriprep' / 'desc-aseg_dseg.tsv'))
            _copy_any(dseg_tsv,
                      str(Path(output_dir) / 'fmriprep' /
                          'desc-aparcaseg_dseg.tsv'))
        logger.log(25, 'fMRIPrep finished without errors')
    finally:
        # Generate reports phase
        errno += generate_reports(subject_list, output_dir, work_dir, run_uuid,
                                  sentry_sdk=sentry_sdk)
        write_derivative_description(bids_dir,
                                     str(Path(output_dir) / 'fmriprep'))

    if not opts.notrack and errno == 0:
        sentry_sdk.capture_message('fMRIPrep finished without errors',
                                   level='info')
    sys.exit(int(errno > 0))
def main():
    """Entry point of the dMRIPrep CLI.

    Parses arguments, records usage via popylar (unless ``--notrack``),
    validates the BIDS input directory and the FreeSurfer license, builds
    the workflow in a subprocess, runs it, and finally generates the
    citation boilerplate, reports, and the derivatives description.
    Exits via ``sys.exit`` with a non-zero status on any failure.
    """
    from nipype import logging as nlogging
    from multiprocessing import set_start_method, Process, Manager
    from ..utils.bids import write_derivative_description, validate_input_dir
    from ..__about__ import __ga_id__
    set_start_method('forkserver')
    warnings.showwarning = _warn_redirect
    opts = get_parser().parse_args()
    exec_env = os.name

    if not opts.notrack:
        # BUG FIX: these strings were separate positional arguments to
        # logger.info, which treats them as %-format args; with no format
        # specifiers in the message this raised a formatting TypeError and
        # dropped the text. Implicit concatenation yields one message.
        logger.info(
            "Your usage of dmriprep is being recorded using popylar (https://popylar.github.io/). "  # noqa
            "For details, see https://nipreps.github.io/dmriprep/usage.html. "
            "To opt out, call dmriprep with a `--notrack` flag")
        import popylar
        popylar.track_event(__ga_id__, 'run', 'cli_run')

    # Validate inputs
    if not opts.skip_bids_validation:
        print("Making sure the input data is BIDS compliant (warnings can be ignored in most "
              "cases).")
        validate_input_dir(exec_env, opts.bids_dir, opts.participant_label)

    # FreeSurfer license
    default_license = str(Path(os.getenv('FREESURFER_HOME')) / 'license.txt')
    # Precedence: --fs-license-file, $FS_LICENSE, default_license
    license_file = opts.fs_license_file or Path(
        os.getenv('FS_LICENSE', default_license))
    if not license_file.exists():
        raise RuntimeError("""\
ERROR: a valid license file is required for FreeSurfer to run. dMRIPrep looked for an existing \
license file at several paths, in this order: 1) command line argument ``--fs-license-file``; \
2) ``$FS_LICENSE`` environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. Get it \
(for free) by registering at https://surfer.nmr.mgh.harvard.edu/registration.html""")
    os.environ['FS_LICENSE'] = str(license_file.resolve())

    # Retrieve logging level
    log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG))
    # Set logging
    logger.setLevel(log_level)
    nlogging.getLogger('nipype.workflow').setLevel(log_level)
    nlogging.getLogger('nipype.interface').setLevel(log_level)
    nlogging.getLogger('nipype.utils').setLevel(log_level)

    # Call build_workflow(opts, retval) in a subprocess so heavy imports
    # do not pollute the master process before it forks workers
    with Manager() as mgr:
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(opts, retval))
        p.start()
        p.join()

        retcode = p.exitcode or retval.get('return_code', 0)

        bids_dir = Path(retval.get('bids_dir'))
        output_dir = Path(retval.get('output_dir'))
        work_dir = Path(retval.get('work_dir'))
        plugin_settings = retval.get('plugin_settings', None)
        subject_list = retval.get('subject_list', None)
        dmriprep_wf = retval.get('workflow', None)
        run_uuid = retval.get('run_uuid', None)

    if opts.reports_only:
        sys.exit(int(retcode > 0))

    if opts.boilerplate:
        sys.exit(int(retcode > 0))

    if dmriprep_wf and opts.write_graph:
        dmriprep_wf.write_graph(graph2use="colored", format='svg',
                                simple_form=True)

    retcode = retcode or int(dmriprep_wf is None)
    if retcode != 0:
        sys.exit(retcode)

    # Check workflow for missing commands
    missing = check_deps(dmriprep_wf)
    if missing:
        print("Cannot run dMRIPrep. Missing dependencies:", file=sys.stderr)
        for iface, cmd in missing:
            print("\t{} (Interface: {})".format(cmd, iface))
        sys.exit(2)
    # Clean up master process before running workflow, which may create forks
    gc.collect()

    errno = 1  # Default is error exit unless otherwise set
    try:
        dmriprep_wf.run(**plugin_settings)
    except Exception as e:
        if not opts.notrack:
            popylar.track_event(__ga_id__, 'run', 'cli_error')
        logger.critical('dMRIPrep failed: %s', e)
        raise e
    else:
        if opts.run_reconall:
            from templateflow import api
            from niworkflows.utils.misc import _copy_any
            dseg_tsv = str(api.get('fsaverage', suffix='dseg',
                                   extension=['.tsv']))
            _copy_any(dseg_tsv,
                      str(output_dir / 'dmriprep' / 'desc-aseg_dseg.tsv'))
            _copy_any(dseg_tsv,
                      str(output_dir / 'dmriprep' / 'desc-aparcaseg_dseg.tsv'))
        errno = 0
        logger.log(25, 'dMRIPrep finished without errors')
        if not opts.notrack:
            popylar.track_event(__ga_id__, 'run', 'cli_finished')
    finally:
        from niworkflows.reports import generate_reports
        from subprocess import check_call, CalledProcessError, TimeoutExpired
        from pkg_resources import resource_filename as pkgrf
        from shutil import copyfile

        citation_files = {
            ext: output_dir / 'dmriprep' / 'logs' / ('CITATION.%s' % ext)
            for ext in ('bib', 'tex', 'md', 'html')
        }

        if citation_files['md'].exists():
            # Generate HTML file resolving citations
            cmd = ['pandoc', '-s', '--bibliography',
                   pkgrf('dmriprep', 'data/boilerplate.bib'),
                   '--filter', 'pandoc-citeproc',
                   '--metadata', 'pagetitle="dMRIPrep citation boilerplate"',
                   str(citation_files['md']),
                   '-o', str(citation_files['html'])]
            logger.info(
                'Generating an HTML version of the citation boilerplate...')
            try:
                check_call(cmd, timeout=10)
            except (FileNotFoundError, CalledProcessError, TimeoutExpired):
                logger.warning('Could not generate CITATION.html file:\n%s',
                               ' '.join(cmd))

            # Generate LaTex file resolving citations
            cmd = ['pandoc', '-s', '--bibliography',
                   pkgrf('dmriprep', 'data/boilerplate.bib'),
                   '--natbib', str(citation_files['md']),
                   '-o', str(citation_files['tex'])]
            logger.info(
                'Generating a LaTeX version of the citation boilerplate...')
            try:
                check_call(cmd, timeout=10)
            except (FileNotFoundError, CalledProcessError, TimeoutExpired):
                logger.warning('Could not generate CITATION.tex file:\n%s',
                               ' '.join(cmd))
            else:
                copyfile(pkgrf('dmriprep', 'data/boilerplate.bib'),
                         citation_files['bib'])
        else:
            logger.warning(
                'dMRIPrep could not find the markdown version of '
                'the citation boilerplate (%s). HTML and LaTeX versions'
                ' of it will not be available', citation_files['md'])

        # Generate reports phase
        failed_reports = generate_reports(
            subject_list, output_dir, work_dir, run_uuid,
            config=pkgrf('dmriprep', 'config/reports-spec.yml'),
            packagename='dmriprep')
        write_derivative_description(bids_dir, output_dir / 'dmriprep')
        sys.exit(int((errno + failed_reports) > 0))