Exemplo n.º 1
0
Arquivo: bids.py Projeto: j1c/m2g
def s3_push_data(bucket, remote, outDir, modifier, creds=True):
    """
    Pushes the contents of outDir to the given S3 bucket/remote/modifier
    prefix via the AWS CLI, excluding anything under tmp/.
    """
    push_cmd = ('aws s3 cp --exclude "tmp/*" {} s3://{}/{}/{} '
                '--recursive --acl public-read').format(outDir, bucket,
                                                       remote, modifier)
    if creds:
        mgu.execute_cmd(push_cmd)
    else:
        # Fall back to anonymous access; large uploads may be rejected.
        print("Note: no credentials provided, may fail to push big files.")
        mgu.execute_cmd(push_cmd + ' --no-sign-request')
Exemplo n.º 2
0
 def align_epi(self, epi, t1, brain, out):
     """
     Aligns an EPI image to a T1w image using FSL's epi_reg tool.
     """
     reg_cmd = 'epi_reg --epi={} --t1={} --t1brain={} --out={}'.format(
         epi, t1, brain, out)
     mgu.execute_cmd(reg_cmd, verb=True)
def crawl_bucket(bucket, path, group=False):
    """
    Gets subject list for a given S3 bucket and path.

    Parses `aws s3 ls` output: returns a list of atlas names when
    group=True, otherwise an OrderedDict mapping subject -> sessions
    (with [None] for subjects that have no ses-* prefixes).
    """
    if group:
        listing, _ = mgu.execute_cmd(
            'aws s3 ls s3://{}/{}/graphs/'.format(bucket, path))
        atlases = re.findall('PRE (.+)/', listing)
        print("Atlas IDs: " + ", ".join(atlases))
        return atlases

    listing, _ = mgu.execute_cmd('aws s3 ls s3://{}/{}/'.format(bucket, path))
    subjs = re.findall('PRE sub-(.+)/', listing)
    seshs = OrderedDict()
    for subj in subjs:
        sub_listing, _ = mgu.execute_cmd(
            'aws s3 ls s3://{}/{}/sub-{}/'.format(bucket, path, subj))
        found = re.findall('ses-(.+)/', sub_listing)
        # An empty hit list becomes [None] so downstream loops still run.
        seshs[subj] = found or [None]
    pairs = [subj + '-' + sesh if sesh is not None else subj
             for subj in subjs for sesh in seshs[subj]]
    print("Session IDs: " + ", ".join(pairs))
    return seshs
def kill_jobs(jobdir, reason='"Killing job"'):
    """
    Given a job directory, cancels or terminates every job recorded
    under jobdir/ids/.

    Parameters
    ----------
    jobdir : str
        Directory containing an ids/ folder of submission JSON files.
    reason : str
        Reason string passed to AWS Batch (pre-quoted for the CLI).
    """
    cancel_templ = 'aws batch cancel-job --job-id {} --reason {}'
    terminate_templ = 'aws batch terminate-job --job-id {} --reason {}'

    # Fixed typo in the user-facing message ("Canelling" -> "Cancelling").
    print("Cancelling/Terminating jobs in {}/ids/...".format(jobdir))
    jobs = os.listdir(jobdir + '/ids/')
    for job in jobs:
        with open('{}/ids/{}'.format(jobdir, job), 'r') as inf:
            submission = json.load(inf)
        jid = submission['jobId']
        name = submission['jobName']
        status = get_status(jobdir, jid)
        if status in ['SUCCEEDED', 'FAILED']:
            # Finished jobs need no action.
            print("... No action needed for {}...".format(name))
        elif status in ['SUBMITTED', 'PENDING', 'RUNNABLE']:
            # Not yet running: cancel-job is sufficient.
            cmd = cancel_templ.format(jid, reason)
            print("... Cancelling job {}...".format(name))
            out, err = mgu.execute_cmd(cmd)
        elif status in ['STARTING', 'RUNNING']:
            # Already running: terminate-job is required.
            cmd = terminate_templ.format(jid, reason)
            print("... Terminating job {}...".format(name))
            out, err = mgu.execute_cmd(cmd)
        else:
            print("... Unknown status??")
Exemplo n.º 5
0
def session_level(inDir, outDir, subjs, sesh=None, debug=False,
                      stc=None, dwi=True):
    """
    Crawls the given BIDS organized directory for data pertaining to the given
    subject and session, and passes necessary files to ndmg_pipeline for
    processing.

    Parameters: inDir/outDir are the BIDS input and derivative output
    directories; subjs is the subject list; sesh optionally restricts to
    one session; debug keeps intermediate files; dwi selects the
    diffusion (vs functional) pipeline.
    """
    # NOTE(review): `atlas_dir` is not defined in this function; presumably
    # a module-level global -- confirm before reusing this code.
    labels, atlas, atlas_mask, atlas_brain, lv_maks = get_atlas(atlas_dir, dwi)

    mgu.execute_cmd("mkdir -p {} {}/tmp".format(outDir, outDir))

    # NOTE(review): the boolean `dwi` parameter is shadowed here by the list
    # of DWI files; the `if dwi:` checks below therefore test list
    # truthiness, not the original flag -- confirm this is intentional.
    anat, func, dwi, bvec, bval = crawl_bids_directory(inDir, subjs, sesh)

    if dwi:
        assert(len(anat) == len(dwi))
        assert(len(bvec) == len(dwi))
        assert(len(bval) == len(dwi))
    else:
        assert(len(anat) == len(func))

    # Each anatomical scan is processed with its matching DWI triplet.
    for i, scans in enumerate(anat):
        print("T1 file: {}".format(anat[i]))
        if dwi:
            print("DWI file: {}".format(dwi[i]))
            print("Bval file: {}".format(bval[i]))
            print("Bvec file: {}".format(bvec[i]))

            ndmg_dwi_pipeline(dwi[i], bval[i], bvec[i], anat[i], atlas,
                              atlas_mask, labels, outDir, clean=(not debug))
Exemplo n.º 6
0
def get_status(jobdir, jobid=None):
    """
    Reports the AWS Batch status of one job (by id) or of every job
    tracked under jobdir/ids/.
    """
    describe = 'aws batch describe-jobs --jobs {}'

    if jobid is not None:
        # Single-job query: return the parsed status string.
        print("Describing job id {}...".format(jobid))
        out, err = mgu.execute_cmd(describe.format(jobid))
        status = re.findall('"status": "([A-Za-z]+)",', out)[0]
        print("... Status: {}".format(status))
        return status

    # Otherwise describe every submission recorded on disk.
    print("Describing jobs in {}/ids/...".format(jobdir))
    for job in os.listdir(jobdir + '/ids/'):
        with open('{}/ids/{}'.format(jobdir, job), 'r') as inf:
            submission = json.load(inf)
        print("... Checking job {}...".format(submission['jobName']))
        out, err = mgu.execute_cmd(describe.format(submission['jobId']))
        status = re.findall('"status": "([A-Za-z]+)",', out)[0]
        print("... ... Status: {}".format(status))
    return 0
def get_status(jobdir, jobid=None):
    """
    Given a job id (or, if jobid is None, a job directory), queries AWS
    Batch for the current status of the job(s) and prints each one.
    """
    cmd_template = 'aws batch describe-jobs --jobs {}'
    status_pat = '"status": "([A-Za-z]+)",'

    if jobid is None:
        print("Describing jobs in {}/ids/...".format(jobdir))
        id_dir = jobdir + '/ids/'
        for fname in os.listdir(id_dir):
            with open(id_dir + fname, 'r') as inf:
                sub = json.load(inf)
            cmd = cmd_template.format(sub['jobId'])
            print("... Checking job {}...".format(sub['jobName']))
            out, err = mgu.execute_cmd(cmd)
            print("... ... Status: {}".format(re.findall(status_pat, out)[0]))
        return 0
    else:
        print("Describing job id {}...".format(jobid))
        out, err = mgu.execute_cmd(cmd_template.format(jobid))
        status = re.findall(status_pat, out)[0]
        print("... Status: {}".format(status))
        return status
Exemplo n.º 8
0
Arquivo: register.py Projeto: j1c/m2g
    def apply_warp(self, inp, out, ref, warp, xfm=None, mask=None):
        """
        Warps a functional image into reference space in a single
        applywarp step, combining the structural->reference warp with an
        optional functional->structural affine.

        **Positional Arguments:**

            inp:
                - the input image to be aligned as a nifti image file.
            out:
                - the output aligned image.
            ref:
                - the image being aligned to.
            warp:
                - the warp from the structural to reference space.
            xfm:
                - optional functional->structural affine (--premat).
            mask:
                - optional mask applied during warping (--mask).
        """
        parts = ["applywarp --ref=" + ref, " --in=" + inp,
                 " --out=" + out, " --warp=" + warp]
        if xfm is not None:
            parts.append(" --premat=" + xfm)
        if mask is not None:
            parts.append(" --mask=" + mask)
        mgu.execute_cmd("".join(parts), verb=True)
Exemplo n.º 9
0
def crawl_bucket(bucket, path, group=False):
    """
    Gets subject list for a given S3 bucket and path by parsing the
    output of `aws s3 ls`.
    """
    if group:
        out, err = mgu.execute_cmd(
            'aws s3 ls s3://{}/{}/graphs/'.format(bucket, path))
        atlases = re.findall('PRE (.+)/', out)
        print("Atlas IDs: " + ", ".join(atlases))
        return atlases
    out, err = mgu.execute_cmd('aws s3 ls s3://{}/{}/'.format(bucket, path))
    subjs = re.findall('PRE sub-(.+)/', out)
    sub_templ = 'aws s3 ls s3://{}/{}/sub-{}/'
    seshs = OrderedDict()
    for subj in subjs:
        out, err = mgu.execute_cmd(sub_templ.format(bucket, path, subj))
        sesh_list = re.findall('ses-(.+)/', out)
        # Subjects without sessions map to [None].
        seshs[subj] = sesh_list if sesh_list else [None]
    labels = []
    for subj in subjs:
        for sesh in seshs[subj]:
            labels.append(subj if sesh is None else subj + '-' + sesh)
    print("Session IDs: " + ", ".join(labels))
    return seshs
Exemplo n.º 10
0
    def apply_warp(self, inp, out, ref, warp, xfm=None, mask=None):
        """
        Applies a warp from the functional to reference space in one
        step, using the structural->reference warp plus an optional
        functional->structural affine.

        **Positional Arguments:**

            inp:
                - the input image to be aligned as a nifti image file.
            out:
                - the output aligned image.
            ref:
                - the image being aligned to.
            warp:
                - the warp from the structural to reference space.
            xfm:
                - optional functional->structural affine (--premat).
            mask:
                - optional mask image (--mask).
        """
        cmd = "applywarp --ref={} --in={} --out={} --warp={}".format(
            ref, inp, out, warp)
        if xfm is not None:
            cmd = cmd + " --premat=" + xfm
        if mask is not None:
            cmd = cmd + " --mask=" + mask
        mgu.execute_cmd(cmd, verb=True)
Exemplo n.º 11
0
def kill_jobs(jobdir, reason='"Killing job"'):
    """
    Cancels or terminates every AWS Batch job recorded under jobdir/ids/.

    Parameters
    ----------
    jobdir : str
        Directory containing an ids/ folder of submission JSON files.
    reason : str
        Reason string passed to AWS Batch (pre-quoted for the CLI).
    """
    cmd_template1 = 'aws batch cancel-job --job-id {} --reason {}'
    cmd_template2 = 'aws batch terminate-job --job-id {} --reason {}'

    # Fixed typo in the user-facing message ("Canelling" -> "Cancelling").
    print("Cancelling/Terminating jobs in {}/ids/...".format(jobdir))
    jobs = os.listdir(jobdir + '/ids/')
    for job in jobs:
        with open('{}/ids/{}'.format(jobdir, job), 'r') as inf:
            submission = json.load(inf)
        jid = submission['jobId']
        name = submission['jobName']
        status = get_status(jobdir, jid)
        if status in ['SUCCEEDED', 'FAILED']:
            # Finished jobs need no action.
            print("... No action needed for {}...".format(name))
        elif status in ['SUBMITTED', 'PENDING', 'RUNNABLE']:
            # Not yet running: cancel-job is sufficient.
            cmd = cmd_template1.format(jid, reason)
            print("... Cancelling job {}...".format(name))
            out, err = mgu.execute_cmd(cmd)
        elif status in ['STARTING', 'RUNNING']:
            # Already running: terminate-job is required.
            cmd = cmd_template2.format(jid, reason)
            print("... Terminating job {}...".format(name))
            out, err = mgu.execute_cmd(cmd)
        else:
            print("... Unknown status??")
Exemplo n.º 12
0
Arquivo: register.py Projeto: j1c/m2g
 def align_epi(self, epi, t1, brain, out):
     """
     Aligns EPI images to a T1w image by running FSL's epi_reg.
     """
     mgu.execute_cmd(
         'epi_reg --epi={} --t1={} --t1brain={} --out={}'.format(
             epi, t1, brain, out),
         verb=True)
Exemplo n.º 13
0
Arquivo: ndmg_bids.py Projeto: j1c/m2g
def group_level(inDir,
                outDir,
                dataset=None,
                atlas=None,
                minimal=False,
                log=False,
                hemispheres=False,
                dwi=True):
    """
    Crawls the output directory from ndmg and computes qc metrics on the
    derivatives produced.

    Returns -1 (no-op) for functional data; otherwise writes QA metrics
    and panel plots under outDir/qa/graphs/<label>/.
    """
    if not dwi:
        print("Currently there is no group level analysis for fmri.")
        return -1

    outDir = op.join(outDir, 'qa', 'graphs')
    mgu.execute_cmd("mkdir -p {}".format(outDir))

    # Parcellation labels are the immediate subdirectories of inDir.
    labels_used = next(os.walk(inDir))[1]

    if atlas is not None:
        labels_used = [atlas]

    for skip in skippers:
        if skip in labels_used:
            print("Skipping {} parcellation".format(skip))
            labels_used.remove(skip)

    for label in labels_used:
        print("Parcellation: {}".format(label))
        tmp_in = op.join(inDir, label)
        # Bug fix: join each file with its containing directory (root),
        # not with tmp_in -- files found in nested subdirectories were
        # previously given non-existent paths.
        fs = [
            op.join(root, fl) for root, dirs, files in os.walk(tmp_in)
            for fl in files if fl.endswith(".graphml")
            or fl.endswith(".gpickle") or fl.endswith('edgelist')
        ]
        tmp_out = op.join(outDir, label)
        mgu.execute_cmd("mkdir -p {}".format(tmp_out))
        try:
            compute_metrics(fs, tmp_out, label)
            outf = op.join(tmp_out, '{}_plot'.format(label))
            make_panel_plot(tmp_out,
                            outf,
                            dataset=dataset,
                            atlas=label,
                            minimal=minimal,
                            log=log,
                            hemispheres=hemispheres)
        except Exception as e:
            # Best-effort per parcellation: report and move on.
            print("Failed group analysis for {} parcellation.".format(label))
            print(e)
            continue
Exemplo n.º 14
0
Arquivo: register.py Projeto: j1c/m2g
    def align(self,
              inp,
              ref,
              xfm=None,
              out=None,
              dof=12,
              searchrad=True,
              bins=256,
              interp=None,
              cost="mutualinfo"):
        """
        Aligns two images with FSL flirt and optionally stores the
        transform between them.

        **Positional Arguments:**

                inp:
                    - Input image to be aligned as a nifti image file
                ref:
                    - Image being aligned to as a nifti image file
                xfm:
                    - Returned transform between two images
                out:
                    - determines whether the image will be automatically
                    aligned.
                dof:
                    - the number of degrees of freedom of the alignment.
                searchrad:
                    - a bool indicating whether to use the predefined
                    searchradius parameter (180 degree sweep in x, y, and z).
                interp:
                    - the interpolation method to use. Default is trilinear.
        """
        cmd = "flirt -in {} -ref {}".format(inp, ref)
        # Append each optional flag only when its value was supplied.
        optional = [("-omat", xfm), ("-out", out), ("-dof", dof),
                    ("-bins", bins), ("-interp", interp), ("-cost", cost)]
        for flag, value in optional:
            if value is not None:
                cmd += " {} {}".format(flag, value)
        if searchrad is not None:
            cmd += (" -searchrx -180 180 -searchry -180 180 "
                    "-searchrz -180 180")
        mgu.execute_cmd(cmd, verb=True)
Exemplo n.º 15
0
Arquivo: register.py Projeto: j1c/m2g
    def combine_xfms(self, xfm1, xfm2, xfmout):
        """
        Composes two transformations into a single output transform
        using FSL's convert_xfm.

        **Positional Arguments**
            xfm1:
                - the path to the first transformation
            xfm2:
                - the path to the second transformation
            xfmout:
                - the path to the output transformation
        """
        combine_cmd = "convert_xfm -omat {} -concat {} {}".format(
            xfmout, xfm1, xfm2)
        mgu.execute_cmd(combine_cmd, verb=True)
Exemplo n.º 16
0
    def combine_xfms(self, xfm1, xfm2, xfmout):
        """
        Concatenates two transformations and writes the resulting
        combined transform to disk.

        **Positional Arguments**
            xfm1:
                - the path to the first transformation
            xfm2:
                - the path to the second transformation
            xfmout:
                - the path to the output transformation
        """
        mgu.execute_cmd(
            "convert_xfm -omat %s -concat %s %s" % (xfmout, xfm1, xfm2),
            verb=True)
Exemplo n.º 17
0
Arquivo: register.py Projeto: j1c/m2g
    def resample_fsl(self, base, res, template):
        """
        Resamples a base image with FSL flirt to the isotropic voxel
        size of a template image.

        **Positional Arguments:**

            base:
                - the path to the base image to resample.
            res:
                - the filename after resampling.
            template:
                - the template image to align to.
        """
        # NOTE(review): get_header() is nibabel's deprecated accessor
        # (newer releases use .header) -- kept as-is for compatibility.
        goal_res = int(nb.load(template).get_header().get_zooms()[0])
        resample_cmd = ("flirt -in {} -ref {} -out {} -nosearch "
                        "-applyisoxfm {}").format(base, template, res,
                                                  goal_res)
        mgu.execute_cmd(resample_cmd, verb=True)
Exemplo n.º 18
0
    def resample_fsl(self, base, res, template):
        """
        Uses flirt's -applyisoxfm to resample a base image onto the
        isotropic voxel grid of the given template.

        **Positional Arguments:**

            base:
                - the path to the base image to resample.
            res:
                - the filename after resampling.
            template:
                - the template image to align to.
        """
        # Target resolution is taken from the template's first zoom.
        # NOTE(review): get_header() is deprecated in newer nibabel.
        goal_res = int(nb.load(template).get_header().get_zooms()[0])
        mgu.execute_cmd(
            "flirt -in {} -ref {} -out {} -nosearch -applyisoxfm {}".format(
                base, template, res, goal_res),
            verb=True)
Exemplo n.º 19
0
    def applyxfm(self, inp, ref, xfm, aligned):
        """
        Applies an existing transform to align one image to another.

        **Positional Arguments:**

                inp:
                    - Input image to be aligned as a nifti image file
                ref:
                    - Image being aligned to as a nifti image file
                xfm:
                    - Transform between two images
                aligned:
                    - Aligned output image as a nifti image file
        """
        mgu.execute_cmd(
            "flirt -in {} -ref {} -out {} -init {} -interp trilinear "
            "-applyxfm".format(inp, ref, aligned, xfm),
            verb=True)
Exemplo n.º 20
0
Arquivo: register.py Projeto: j1c/m2g
    def applyxfm(self, inp, ref, xfm, aligned):
        """
        Aligns two images using a precomputed transform (flirt
        -applyxfm with trilinear interpolation).

        **Positional Arguments:**

                inp:
                    - Input image to be aligned as a nifti image file
                ref:
                    - Image being aligned to as a nifti image file
                xfm:
                    - Transform between two images
                aligned:
                    - Aligned output image as a nifti image file
        """
        flirt_cmd = ("flirt -in {} -ref {} -out {} -init {} "
                     "-interp trilinear -applyxfm").format(inp, ref,
                                                           aligned, xfm)
        mgu.execute_cmd(flirt_cmd, verb=True)
Exemplo n.º 21
0
Arquivo: bids.py Projeto: j1c/m2g
def s3_get_data(bucket, remote, local, public=True):
    """
    Given an s3 bucket, data location on the bucket, and a download location,
    crawls the bucket and recursively pulls all data.
    """
    client = boto3.client('s3')
    if not public:
        # Private pulls require the caller's credentials to see the bucket;
        # fail early with the list of visible buckets otherwise.
        available = [bk['Name'] for bk in client.list_buckets()['Buckets']]
        if bucket not in available:
            sys.exit("Error: could not locate bucket. Available buckets: " +
                     ", ".join(available))

    fetch = 'aws s3 cp --recursive s3://{}/{}/ {}'.format(bucket, remote,
                                                          local)
    if public:
        fetch += ' --no-sign-request --region=us-east-1'

    std, err = mgu.execute_cmd('mkdir -p {}'.format(local))
    std, err = mgu.execute_cmd(fetch)
Exemplo n.º 22
0
def group_level(inDir, outDir, dataset=None, atlas=None, minimal=False,
                log=False, hemispheres=False, dwi=True):
    """
    Crawls the output directory from ndmg and computes qc metrics on the
    derivatives produced.

    Returns -1 (no-op) for functional data; otherwise writes QA metrics
    and panel plots under outDir/qa/graphs/<label>/.
    """
    if not dwi:
        print("Currently there is no group level analysis for fmri.")
        return -1

    outDir = op.join(outDir, 'qa', 'graphs')
    mgu.execute_cmd("mkdir -p {}".format(outDir))

    # Parcellation labels are the immediate subdirectories of inDir.
    labels_used = next(os.walk(inDir))[1]

    if atlas is not None:
        labels_used = [atlas]

    for skip in skippers:
        if skip in labels_used:
            print("Skipping {} parcellation".format(skip))
            labels_used.remove(skip)

    for label in labels_used:
        print("Parcellation: {}".format(label))
        tmp_in = op.join(inDir, label)
        # Bug fix: join files with their containing directory (root), not
        # tmp_in, so graphs found in nested directories resolve correctly.
        fs = [op.join(root, fl)
              for root, dirs, files in os.walk(tmp_in)
              for fl in files
              if fl.endswith(".graphml") or fl.endswith(".gpickle")
              or fl.endswith('edgelist')]
        tmp_out = op.join(outDir, label)
        mgu.execute_cmd("mkdir -p {}".format(tmp_out))
        try:
            compute_metrics(fs, tmp_out, label)
            outf = op.join(tmp_out, '{}_plot'.format(label))
            make_panel_plot(tmp_out, outf, dataset=dataset, atlas=label,
                            minimal=minimal, log=log, hemispheres=hemispheres)
        except Exception as e:
            # Best-effort per parcellation: report and move on.
            print("Failed group analysis for {} parcellation.".format(label))
            print(e)
            continue
Exemplo n.º 23
0
    def align(self, inp, ref, xfm=None, out=None, dof=12, searchrad=True,
              bins=256, interp=None, cost="mutualinfo"):
        """
        Aligns two images with FSL flirt and optionally stores the
        transform between them.

        **Positional Arguments:**

                inp:
                    - Input image to be aligned as a nifti image file
                ref:
                    - Image being aligned to as a nifti image file
                xfm:
                    - Returned transform between two images
                out:
                    - determines whether the image will be automatically
                    aligned.
                dof:
                    - the number of degrees of freedom of the alignment.
                searchrad:
                    - a bool indicating whether to use the predefined
                    searchradius parameter (180 degree sweep in x, y, and z).
                interp:
                    - the interpolation method to use. Default is trilinear.
        """
        pieces = ["flirt -in {} -ref {}".format(inp, ref)]
        if xfm is not None:
            pieces.append(" -omat {}".format(xfm))
        if out is not None:
            pieces.append(" -out {}".format(out))
        if dof is not None:
            pieces.append(" -dof {}".format(dof))
        if bins is not None:
            pieces.append(" -bins {}".format(bins))
        if interp is not None:
            pieces.append(" -interp {}".format(interp))
        if cost is not None:
            pieces.append(" -cost {}".format(cost))
        if searchrad is not None:
            pieces.append(" -searchrx -180 180 -searchry -180 180 "
                          "-searchrz -180 180")
        mgu.execute_cmd("".join(pieces), verb=True)
Exemplo n.º 24
0
Arquivo: ndmg_bids.py Projeto: j1c/m2g
def session_level(inDir,
                  outDir,
                  subjs,
                  sesh=None,
                  debug=False,
                  stc=None,
                  dwi=True):
    """
    Crawls the given BIDS organized directory for data pertaining to the given
    subject and session, and passes necessary files to ndmg_pipeline for
    processing.

    Parameters: inDir/outDir are the BIDS input and derivative output
    directories; subjs is the subject list; sesh optionally restricts to
    one session; debug keeps intermediate files; dwi selects the
    diffusion (vs functional) pipeline.
    """
    # NOTE(review): `atlas_dir` is not defined in this function; presumably
    # a module-level global -- confirm before reusing this code.
    labels, atlas, atlas_mask, atlas_brain, lv_maks = get_atlas(atlas_dir, dwi)

    mgu.execute_cmd("mkdir -p {} {}/tmp".format(outDir, outDir))

    # NOTE(review): the boolean `dwi` parameter is shadowed here by the list
    # of DWI files; the `if dwi:` checks below therefore test list
    # truthiness, not the original flag -- confirm this is intentional.
    anat, func, dwi, bvec, bval = crawl_bids_directory(inDir, subjs, sesh)

    if dwi:
        assert (len(anat) == len(dwi))
        assert (len(bvec) == len(dwi))
        assert (len(bval) == len(dwi))
    else:
        assert (len(anat) == len(func))

    # Each anatomical scan is processed with its matching DWI triplet.
    for i, scans in enumerate(anat):
        print("T1 file: {}".format(anat[i]))
        if dwi:
            print("DWI file: {}".format(dwi[i]))
            print("Bval file: {}".format(bval[i]))
            print("Bvec file: {}".format(bvec[i]))

            ndmg_dwi_pipeline(dwi[i],
                              bval[i],
                              bvec[i],
                              anat[i],
                              atlas,
                              atlas_mask,
                              labels,
                              outDir,
                              clean=(not debug))
Exemplo n.º 25
0
    def align_slices(self, dwi, corrected_dwi, idx):
        """
        Performs eddy-correction (or self-alignment) of a stack of 3D images.

        **Positional Arguments:**
                dwi:
                    - 4D (DTI) image volume as a nifti file
                corrected_dwi:
                    - Corrected and aligned DTI volume in a nifti file
                idx:
                    - Index of the first B0 volume in the stack
        """
        cmd = "eddy_correct {} {} {}".format(dwi, corrected_dwi, idx)
        # Dropped the unused `status` binding; execute_cmd's return value
        # was never inspected here.
        mgu.execute_cmd(cmd, verb=True)
Exemplo n.º 26
0
Arquivo: register.py Projeto: j1c/m2g
    def align_slices(self, dwi, corrected_dwi, idx):
        """
        Performs eddy-correction (or self-alignment) of a stack of 3D images.

        **Positional Arguments:**
                dwi:
                    - 4D (DTI) image volume as a nifti file
                corrected_dwi:
                    - Corrected and aligned DTI volume in a nifti file
                idx:
                    - Index of the first B0 volume in the stack
        """
        cmd = "eddy_correct {} {} {}".format(dwi, corrected_dwi, idx)
        # Dropped the unused `status` binding; execute_cmd's return value
        # was never inspected here.
        mgu.execute_cmd(cmd, verb=True)
Exemplo n.º 27
0
def submit_jobs(jobs, jobdir):
    """
    Given a list of job JSON files, submits them to AWS Batch and
    records each submission under jobdir/ids/.
    """
    cmd_template = 'aws batch submit-job --cli-input-json file://{}'

    for job in jobs:
        cmd = cmd_template.format(job)
        print("... Submitting job {}...".format(job))
        out, err = mgu.execute_cmd(cmd)
        # The CLI emits JSON; parse it with json.loads rather than
        # ast.literal_eval, which rejects true/false/null tokens.
        submission = json.loads(out)
        print("Job Name: {}, Job ID: {}".format(submission['jobName'],
                                                submission['jobId']))
        sub_file = os.path.join(jobdir, 'ids', submission['jobName'] + '.json')
        with open(sub_file, 'w') as outfile:
            json.dump(submission, outfile)
    return 0
def submit_jobs(jobs, jobdir):
    """
    Given a list of job JSON files, submits them to AWS Batch and
    records each submission under jobdir/ids/.
    """
    cmd_template = 'aws batch submit-job --cli-input-json file://{}'

    for job in jobs:
        cmd = cmd_template.format(job)
        print("... Submitting job {}...".format(job))
        out, err = mgu.execute_cmd(cmd)
        # The CLI emits JSON; parse it with json.loads rather than
        # ast.literal_eval, which rejects true/false/null tokens.
        submission = json.loads(out)
        print("Job Name: {}, Job ID: {}".format(submission['jobName'],
                                                submission['jobId']))
        sub_file = os.path.join(jobdir, 'ids', submission['jobName'] + '.json')
        with open(sub_file, 'w') as outfile:
            json.dump(submission, outfile)
    return 0
Exemplo n.º 29
0
Arquivo: register.py Projeto: j1c/m2g
    def align_nonlinear(self, inp, ref, xfm, warp, mask=None):
        """
        Nonlinearly aligns inp to ref with FSL fnirt, seeded by an
        affine transform, and stores the resulting warp.

        **Positional Arguments:**

            inp:
                - the input image.
            ref:
                - the reference image.
            xfm:
                - the affine transform to use.
            warp:
                - the path to store the nonlinear warp.
            mask:
                - a mask in which voxels will be extracted
                during nonlinear alignment.
        """
        fnirt_cmd = ("fnirt --in={} --aff={} --cout={} --ref={} "
                     "--subsamp=4,2,1,1").format(inp, xfm, warp, ref)
        if mask is not None:
            fnirt_cmd += " --refmask={}".format(mask)
        out, err = mgu.execute_cmd(fnirt_cmd, verb=True)
Exemplo n.º 30
0
    def align_nonlinear(self, inp, ref, xfm, warp, mask=None):
        """
        Aligns two images using nonlinear methods (FSL fnirt) and stores
        the transform between them.

        **Positional Arguments:**

            inp:
                - the input image.
            ref:
                - the reference image.
            xfm:
                - the affine transform to use.
            warp:
                - the path to store the nonlinear warp.
            mask:
                - a mask in which voxels will be extracted
                during nonlinear alignment.
        """
        cmd = "fnirt --in={} --aff={} --cout={} --ref={} --subsamp=4,2,1,1"
        cmd = cmd.format(inp, xfm, warp, ref)
        if mask is None:
            out, err = mgu.execute_cmd(cmd, verb=True)
        else:
            out, err = mgu.execute_cmd(
                cmd + " --refmask={}".format(mask), verb=True)
def create_json(bucket, path, threads, jobdir, group=False, credentials=None,
                debug=False, dataset=None, log=False, stc=None, mode='func'):
    """
    Takes parameters to make jsons

    Builds one AWS Batch job-submission JSON per session (participant
    mode) or per atlas (group mode) from a downloaded template, writing
    them under jobdir/jobs/ and returning the list of file paths.
    """
    mgu.execute_cmd("mkdir -p {}".format(jobdir))
    mgu.execute_cmd("mkdir -p {}/jobs/".format(jobdir))
    mgu.execute_cmd("mkdir -p {}/ids/".format(jobdir))
    # `threads` carries atlas names in group mode, and a
    # subject -> sessions mapping otherwise.
    if group:
        template = group_templ
        atlases = threads
    else:
        template = participant_templ
        seshs = threads

    # Download the template only once; it is cached inside jobdir.
    if not os.path.isfile('{}/{}'.format(jobdir, template.split('/')[-1])):
        cmd = 'wget --quiet -P {} {}'.format(jobdir, template)
        mgu.execute_cmd(cmd)

    with open('{}/{}'.format(jobdir, template.split('/')[-1]), 'r') as inf:
        template = json.load(inf)
    cmd = template['containerOverrides']['command']
    env = template['containerOverrides']['environment']

    if credentials is not None:
        # Credentials csv: header row + value row; pick the columns whose
        # header mentions "ID" / "Secret" respectively.
        cred = [line for line in csv.reader(open(credentials))]
        env[0]['value'] = [cred[1][idx]
                           for idx, val in enumerate(cred[0])
                           if "ID" in val][0]  # Adds public key ID to env
        env[1]['value'] = [cred[1][idx]
                           for idx, val in enumerate(cred[0])
                           if "Secret" in val][0]  # Adds secret key to env
    else:
        env = []
    template['containerOverrides']['environment'] = env
    jobs = list()
    # NOTE(review): the placeholder indices below are hard-coded against the
    # template's command layout -- any template change silently breaks them.
    cmd[3] = re.sub('(<MODE>)', mode, cmd[3])
    cmd[5] = re.sub('(<BUCKET>)', bucket, cmd[5])
    cmd[7] = re.sub('(<PATH>)', path, cmd[7])
    # NOTE(review): re.sub raises TypeError when stc is None (the default);
    # callers presumably always pass a string -- confirm.
    cmd[12] = re.sub('(<STC>)', stc, cmd[12])
    if group:
        if dataset is not None:
            cmd[10] = re.sub('(<DATASET>)', dataset, cmd[10])
        else:
            cmd[10] = re.sub('(<DATASET>)', '', cmd[10])

        # Big parcellations excluded from group-level jobs.
        batlas = ['slab907', 'DS03231', 'DS06481', 'DS16784', 'DS72784']
        for atlas in atlases:
            if atlas in batlas:
                print("... Skipping {} parcellation".format(atlas))
                continue
            print("... Generating job for {} parcellation".format(atlas))
            # Deep-copy so each job gets its own command/template objects.
            job_cmd = deepcopy(cmd)
            job_cmd[12] = re.sub('(<ATLAS>)', atlas, job_cmd[12])
            if log:
                job_cmd += ['--log']
            if atlas == 'desikan':
                job_cmd += ['--hemispheres']

            job_json = deepcopy(template)
            ver = ndmg.version.replace('.', '-')
            if dataset:
                name = 'ndmg_{}_{}_{}'.format(ver, dataset, atlas)
            else:
                name = 'ndmg_{}_{}'.format(ver, atlas)
            job_json['jobName'] = name
            job_json['containerOverrides']['command'] = job_cmd
            job = os.path.join(jobdir, 'jobs', name+'.json')
            with open(job, 'w') as outfile:
                json.dump(job_json, outfile)
            jobs += [job]

    else:
        # Participant mode: one job per subject/session pair.
        for subj in seshs.keys():
            print("... Generating job for sub-{}".format(subj))
            for sesh in seshs[subj]:
                job_cmd = deepcopy(cmd)
                job_cmd[9] = re.sub('(<SUBJ>)', subj, job_cmd[9])
                if sesh is not None:
                    job_cmd += [u'--session_label']
                    job_cmd += [u'{}'.format(sesh)]
                if debug:
                    job_cmd += [u'--debug']

                job_json = deepcopy(template)
                ver = ndmg.version.replace('.', '-')
                if dataset:
                    name = 'ndmg_{}_{}_sub-{}'.format(ver, dataset, subj)
                else:
                    name = 'ndmg_{}_sub-{}'.format(ver, subj)
                if sesh is not None:
                    name = '{}_ses-{}'.format(name, sesh)
                job_json['jobName'] = name
                job_json['containerOverrides']['command'] = job_cmd
                job = os.path.join(jobdir, 'jobs', name+'.json')
                with open(job, 'w') as outfile:
                    json.dump(job_json, outfile)
                jobs += [job]
    return jobs
Exemplo n.º 32
0
def ndmg_dwi_pipeline(dwi,
                      bvals,
                      bvecs,
                      mprage,
                      atlas,
                      mask,
                      labels,
                      outdir,
                      clean=False,
                      fmt='edgelist'):
    """
    Creates a brain graph from diffusion MRI data.

    **Positional Arguments:**

            dwi:
                - raw DWI volume as a nifti image file
            bvals:
                - file of gradient strengths (b-values)
            bvecs:
                - file of gradient directions (b-vectors)
            mprage:
                - structural T1w image as a nifti image file
            atlas:
                - template the data will be registered to
            mask:
                - brain mask in atlas space
            labels:
                - parcellation image path, or a list of such paths
            outdir:
                - directory where all derivatives are written
            clean:
                - remove intermediate files when True
            fmt:
                - graph output format passed through to save_graph
    """
    startTime = datetime.now()

    # Create derivative output directories.
    # FIX: the original listed {}/qa/tensors twice; keep one of each.
    dwi_name = mgu.get_filename(dwi)
    cmd = "mkdir -p {}/reg/dwi {}/tensors {}/fibers {}/graphs \
           {}/qa/tensors {}/qa/fibers {}/qa/reg/dwi"
    cmd = cmd.format(*([outdir] * 7))
    mgu.execute_cmd(cmd)

    # Normalize to lists so a single parcellation behaves like many.
    # FIX: the original iterated a bare string for a single label, which
    # produced one "label" per character, and its mkdir format string
    # dropped the label subdirectory entirely.
    if not isinstance(labels, list):
        labels = [labels]
    label_name = [mgu.get_filename(x) for x in labels]
    for label in label_name:
        mgu.execute_cmd("mkdir -p {}/graphs/{}".format(outdir, label))

    # Create derivative output file names
    aligned_dwi = "{}/reg/dwi/{}_aligned.nii.gz".format(outdir, dwi_name)
    tensors = "{}/tensors/{}_tensors.npz".format(outdir, dwi_name)
    fibers = "{}/fibers/{}_fibers.npz".format(outdir, dwi_name)
    print("This pipeline will produce the following derivatives...")
    print("DWI volume registered to atlas: {}".format(aligned_dwi))
    print("Diffusion tensors in atlas space: {}".format(tensors))
    print("Fiber streamlines in atlas space: {}".format(fibers))

    # One graph per parcellation
    graphs = [
        "{}/graphs/{}/{}_{}.{}".format(outdir, x, dwi_name, x, fmt)
        for x in label_name
    ]
    print("Graphs of streamlines downsampled to given labels: " +
          ", ".join(graphs))

    # Creates gradient table from bvalues and bvectors
    print("Generating gradient table...")
    dwi1 = "{}/tmp/{}_t1.nii.gz".format(outdir, dwi_name)
    bvecs1 = "{}/tmp/{}_1.bvec".format(outdir, dwi_name)
    mgp.rescale_bvec(bvecs, bvecs1)
    gtab = mgu.load_bval_bvec_dwi(bvals, bvecs1, dwi, dwi1)

    # Align DWI volumes to Atlas
    print("Aligning volumes...")
    mgr().dwi2atlas(dwi1, gtab, mprage, atlas, aligned_dwi, outdir, clean)
    loc0 = np.where(gtab.b0s_mask)[0][0]  # first b0 volume, used for QA
    reg_mri_pngs(aligned_dwi, atlas, "{}/qa/reg/dwi/".format(outdir), loc=loc0)

    print("Beginning tractography...")
    # Compute tensors and track fiber streamlines
    tens, tracks = mgt().eudx_basic(aligned_dwi, mask, gtab, stop_val=0.2)
    tensor2fa(tens, tensors, aligned_dwi, "{}/tensors/".format(outdir),
              "{}/qa/tensors/".format(outdir))

    # As we've only tested VTK plotting on MNI152 aligned data...
    if nb.load(mask).get_data().shape == (182, 218, 182):
        try:
            visualize_fibs(tracks, fibers, mask,
                           "{}/qa/fibers/".format(outdir), 0.02)
        except Exception:  # narrowed from bare except; VTK QA is best-effort
            print("Fiber QA failed - VTK for Python not configured properly.")

    # And save them to disk
    np.savez(tensors, tens)
    np.savez(fibers, tracks)

    # Generate graphs from streamlines for each parcellation
    for idx, label in enumerate(label_name):
        print("Generating graph for {} parcellation...".format(label))

        labels_im = nb.load(labels[idx])
        # ROI count excludes the background label (0)
        g1 = mgg(len(np.unique(labels_im.get_data())) - 1, labels[idx])
        g1.make_graph(tracks)
        g1.summary()
        g1.save_graph(graphs[idx], fmt=fmt)

    print("Execution took: {}".format(datetime.now() - startTime))

    # Clean temp files
    if clean:
        print("Cleaning up intermediate files... ")
        cmd = 'rm -f {} tmp/{}* {} {}'.format(tensors, dwi_name, aligned_dwi,
                                              fibers)
        mgu.execute_cmd(cmd)

    print("Complete!")
Example #33
0
    def dwi2atlas(self, dwi, gtab, t1w, atlas,
                  aligned_dwi, outdir, clean=False):
        """
        Registers a DWI image to a template (atlas) space by way of the
        subject's T1w image, writing the aligned volume and the transform
        used along the way.

        **Positional Arguments:**

                dwi:
                    - input DWI nifti file to be aligned
                gtab:
                    - gradient table (directions and strengths)
                t1w:
                    - intermediate T1w nifti the DWI is first aligned to
                atlas:
                    - terminal template nifti everything ends up in
                aligned_dwi:
                    - path for the aligned output DWI nifti
                outdir:
                    - directory where derivatives are stored
                clean:
                    - when True, delete intermediate files afterwards
        """
        # Names for every intermediate file this routine produces
        dwi_base = mgu.get_filename(dwi)
        t1w_base = mgu.get_filename(t1w)
        atlas_base = mgu.get_filename(atlas)

        motion_corr = mgu.name_tmps(outdir, dwi_base, "_t2.nii.gz")
        epi_aligned = mgu.name_tmps(outdir, dwi_base, "_ta.nii.gz")
        atlas_aligned = mgu.name_tmps(outdir, dwi_base, "_ta2.nii.gz")
        b0_vol = mgu.name_tmps(outdir, dwi_base, "_b0.nii.gz")
        t1w_ss = mgu.name_tmps(outdir, t1w_base, "_ss.nii.gz")
        t1w2atlas_xfm = mgu.name_tmps(outdir, t1w_base,
                                      "_" + atlas_base + "_xfm.mat")

        # Motion-correct the DTI volumes against the first b0 volume
        first_b0 = np.where(gtab.b0s_mask)[0][0]
        self.align_slices(dwi, motion_corr, first_b0)

        # Pull the b0 volume out of the corrected 4D image...
        dwi_img = nb.load(motion_corr)
        b0_data = mgu.get_b0(gtab, dwi_img.get_data())

        # ...and wrap it in a fresh 3D nifti image
        b0_hdr = dwi_img.get_header()
        b0_hdr.set_data_shape(b0_hdr.get_data_shape()[0:3])
        b0_nifti = nb.Nifti1Image(b0_data, affine=dwi_img.get_affine(),
                                  header=b0_hdr)
        b0_nifti.update_header()
        nb.save(b0_nifti, b0_vol)

        # Skull-strip the T1w, then EPI-register the corrected DWI onto it
        mgu.extract_brain(t1w, t1w_ss, ' -B')
        self.align_epi(motion_corr, t1w, t1w_ss, epi_aligned)

        # Linear T1w -> template registration, saved as a transform matrix
        self.align(t1w, atlas, t1w2atlas_xfm)

        # Apply the combined transform, then resample into atlas space
        self.applyxfm(epi_aligned, atlas, t1w2atlas_xfm, atlas_aligned)
        self.resample(atlas_aligned, aligned_dwi, atlas)

        if clean:
            cmd = "rm -f {} {} {} {} {}*".format(motion_corr, epi_aligned,
                                                 b0_vol, t1w2atlas_xfm,
                                                 t1w_base)
            print("Cleaning temporary registration files...")
            mgu.execute_cmd(cmd)
Example #34
0
def create_json(bucket,
                path,
                threads,
                jobdir,
                group=False,
                credentials=None,
                debug=False,
                dataset=None,
                log=False,
                stc=None,
                mode='func'):
    """
    Writes one AWS Batch job-definition JSON file per job into
    {jobdir}/jobs/, filling in a downloaded template.

    **Positional Arguments:**

            bucket:
                - S3 bucket holding the input data
            path:
                - path within the bucket
            threads:
                - atlases (group mode) or subject->session dict (otherwise)
            jobdir:
                - local directory for templates/jobs/ids
            group:
                - build group-analysis jobs instead of participant jobs
            credentials:
                - CSV file of AWS credentials; keys are placed in the
                  container environment when given
            debug / log / stc / dataset / mode:
                - forwarded into the container command line

    **Returns:**

            jobs:
                - list of paths to the JSON files written
    """
    mgu.execute_cmd("mkdir -p {}".format(jobdir))
    mgu.execute_cmd("mkdir -p {}/jobs/".format(jobdir))
    mgu.execute_cmd("mkdir -p {}/ids/".format(jobdir))
    if group:
        template = group_templ
        atlases = threads
    else:
        template = participant_templ
        seshs = threads

    # Fetch the job template once and cache it in jobdir
    if not os.path.isfile('{}/{}'.format(jobdir, template.split('/')[-1])):
        cmd = 'wget --quiet -P {} {}'.format(jobdir, template)
        mgu.execute_cmd(cmd)

    with open('{}/{}'.format(jobdir, template.split('/')[-1]), 'r') as inf:
        template = json.load(inf)
    cmd = template['containerOverrides']['command']
    env = template['containerOverrides']['environment']

    if credentials is not None:
        # FIX: use a context manager so the credentials file is closed.
        with open(credentials) as credfile:
            cred = [line for line in csv.reader(credfile)]
        env[0]['value'] = [
            cred[1][idx] for idx, val in enumerate(cred[0]) if "ID" in val
        ][0]  # Adds public key ID to env
        env[1]['value'] = [
            cred[1][idx] for idx, val in enumerate(cred[0]) if "Secret" in val
        ][0]  # Adds secret key to env
    else:
        env = []
    template['containerOverrides']['environment'] = env
    jobs = list()
    cmd[3] = re.sub('(<MODE>)', mode, cmd[3])
    cmd[5] = re.sub('(<BUCKET>)', bucket, cmd[5])
    cmd[7] = re.sub('(<PATH>)', path, cmd[7])
    # FIX: stc defaults to None, which would make re.sub raise TypeError;
    # substitute an empty string when no slice-timing option is given.
    cmd[12] = re.sub('(<STC>)', stc if stc is not None else '', cmd[12])
    if group:
        if dataset is not None:
            cmd[10] = re.sub('(<DATASET>)', dataset, cmd[10])
        else:
            cmd[10] = re.sub('(<DATASET>)', '', cmd[10])

        # Parcellations known to be too big for the group pipeline
        batlas = ['slab907', 'DS03231', 'DS06481', 'DS16784', 'DS72784']
        for atlas in atlases:
            if atlas in batlas:
                print("... Skipping {} parcellation".format(atlas))
                continue
            print("... Generating job for {} parcellation".format(atlas))
            job_cmd = deepcopy(cmd)
            job_cmd[12] = re.sub('(<ATLAS>)', atlas, job_cmd[12])
            if log:
                job_cmd += ['--log']
            if atlas == 'desikan':
                job_cmd += ['--hemispheres']

            job_json = deepcopy(template)
            ver = ndmg.version.replace('.', '-')
            if dataset:
                name = 'ndmg_{}_{}_{}'.format(ver, dataset, atlas)
            else:
                name = 'ndmg_{}_{}'.format(ver, atlas)
            job_json['jobName'] = name
            job_json['containerOverrides']['command'] = job_cmd
            job = os.path.join(jobdir, 'jobs', name + '.json')
            with open(job, 'w') as outfile:
                json.dump(job_json, outfile)
            jobs += [job]

    else:
        for subj in seshs.keys():
            print("... Generating job for sub-{}".format(subj))
            # One job per (subject, session) pair
            for sesh in seshs[subj]:
                job_cmd = deepcopy(cmd)
                job_cmd[9] = re.sub('(<SUBJ>)', subj, job_cmd[9])
                if sesh is not None:
                    job_cmd += [u'--session_label']
                    job_cmd += [u'{}'.format(sesh)]
                if debug:
                    job_cmd += [u'--debug']

                job_json = deepcopy(template)
                ver = ndmg.version.replace('.', '-')
                if dataset:
                    name = 'ndmg_{}_{}_sub-{}'.format(ver, dataset, subj)
                else:
                    name = 'ndmg_{}_sub-{}'.format(ver, subj)
                if sesh is not None:
                    name = '{}_ses-{}'.format(name, sesh)
                job_json['jobName'] = name
                job_json['containerOverrides']['command'] = job_cmd
                job = os.path.join(jobdir, 'jobs', name + '.json')
                with open(job, 'w') as outfile:
                    json.dump(job_json, outfile)
                jobs += [job]
    return jobs
Example #35
0
File: register.py Project: j1c/m2g
    def dwi2atlas(self,
                  dwi,
                  gtab,
                  t1w,
                  atlas,
                  aligned_dwi,
                  outdir,
                  clean=False):
        """
        Aligns a DWI image to an atlas via the subject's T1w image and
        stores the transform between them.

        **Positional Arguments:**

                dwi:
                    - Input image to be aligned as a nifti image file
                gtab:
                    - object containing gradient directions and strength
                t1w:
                    - Intermediate image being aligned to as a nifti image file
                atlas:
                    - Terminal image being aligned to as a nifti image file
                aligned_dwi:
                    - Aligned output dwi image as a nifti image file
                outdir:
                    - Directory for derivatives to be stored
                clean:
                    - when True, delete intermediate files at the end
        """
        # Creates names for all intermediate files used
        dwi_name = mgu.get_filename(dwi)
        t1w_name = mgu.get_filename(t1w)
        atlas_name = mgu.get_filename(atlas)

        dwi2 = mgu.name_tmps(outdir, dwi_name, "_t2.nii.gz")
        temp_aligned = mgu.name_tmps(outdir, dwi_name, "_ta.nii.gz")
        temp_aligned2 = mgu.name_tmps(outdir, dwi_name, "_ta2.nii.gz")
        b0 = mgu.name_tmps(outdir, dwi_name, "_b0.nii.gz")
        t1w_brain = mgu.name_tmps(outdir, t1w_name, "_ss.nii.gz")
        xfm = mgu.name_tmps(outdir, t1w_name, "_" + atlas_name + "_xfm.mat")

        # Align DTI volumes to each other, using the first b0 volume as
        # the reference slice
        self.align_slices(dwi, dwi2, np.where(gtab.b0s_mask)[0][0])

        # Loads DTI image in as data and extracts B0 volume
        dwi_im = nb.load(dwi2)
        b0_im = mgu.get_b0(gtab, dwi_im.get_data())

        # Wraps B0 volume in new nifti image; header shape is truncated
        # to 3D since the b0 extraction drops the gradient dimension
        b0_head = dwi_im.get_header()
        b0_head.set_data_shape(b0_head.get_data_shape()[0:3])
        b0_out = nb.Nifti1Image(b0_im,
                                affine=dwi_im.get_affine(),
                                header=b0_head)
        b0_out.update_header()
        nb.save(b0_out, b0)

        # Applies skull stripping to T1 volume, then EPI alignment to T1
        mgu.extract_brain(t1w, t1w_brain, ' -B')
        self.align_epi(dwi2, t1w, t1w_brain, temp_aligned)

        # Applies linear registration from T1 to template
        self.align(t1w, atlas, xfm)

        # Applies combined transform to dwi image volume, then resamples
        # the result onto the atlas grid
        self.applyxfm(temp_aligned, atlas, xfm, temp_aligned2)
        self.resample(temp_aligned2, aligned_dwi, atlas)

        if clean:
            # NOTE(review): "{}*" expands t1w_name without a directory
            # prefix, so this glob is relative to the CWD — confirm intent
            cmd = "rm -f {} {} {} {} {}*".format(dwi2, temp_aligned, b0, xfm,
                                                 t1w_name)
            print("Cleaning temporary registration files...")
            mgu.execute_cmd(cmd)