Example #1
def read_ordering_file(fname):
    if isinstance(fname, list):
        labnam = []
        deleters = []
        for i, item in enumerate(fname):
            if item == 'delete':
                deleters.append(i)
            else:
                labnam.append(item)
        return labnam, deleters

    labnam = []
    deleters = []
    #a binary file will typically fail to decode while being read as text
    try:
        with open(fname, 'r') as fd:
            for i, line in enumerate(fd):
                entry = line.strip().lower()
                if entry == 'delete':
                    deleters.append(i)
                else:
                    labnam.append(entry)
    except UnicodeDecodeError:
        raise CVUError("This doesn't look like a text file: %s" % fname)

    if not labnam:
        raise CVUError('Ordering file %s is empty or has only deletes' % fname)

    return labnam, deleters
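
A minimal usage sketch; the file name and its contents are hypothetical, and the function also accepts an already-parsed list in place of a filename:

# suppose 'order.txt' contains one label per line, with the literal word
# 'delete' marking rows/columns that should be dropped:
#   lh_frontalpole
#   delete
#   rh_frontalpole
labnam, deleters = read_ordering_file('order.txt')
print(labnam)    # ['lh_frontalpole', 'rh_frontalpole']
print(deleters)  # [1]
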
Example #2
    def add_dataset(self, ds, display_metadata, panel=None, group=None):
        '''Add a dataset to the controller.  If panel (and optionally group)
           is specified, place the dataset there; otherwise create a new panel.'''
        if ds.name in self.ds_instances:
            raise CVUError('A dataset with this name already exists')

        if panel is None and group is None:
            panel_ref = self._create_new_panel(ds.name)
            panel = panel_ref.panel_name
            group = 2 if panel_ref.is_full(group=1) else 1
        elif panel is None:
            raise CVUError('Cannot specify group without panel')
        else:
            panel_ref = self.get_named_panel(panel)
            if panel_ref.is_full():
                raise CVUError('That panel is full')
            if group is None:
                group = 2 if panel_ref.is_full(group=1) else 1

        panel_ref.populate(ds, group=group)  #group may be None; populate handles that

        ds_meta = DatasetUIMetadata(self, panel, ds.name, display_metadata)
        self.ds_instances.update({ds.name: ds})
        self.ds_metadatae.update({ds.name: ds_meta})
        self.panel_instances.update({panel_ref.panel_name: panel_ref})
        self.panel_metadatae.update({panel_ref.panel_name: ds_meta})

        self.show_panel(panel_ref)
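
A hypothetical call pattern for add_dataset; `controller`, `ds`, `ds2` and `meta` are placeholders standing in for an instance of the owning class, dataset objects with unique .name attributes, and display metadata (none of these names appear in the source):

controller.add_dataset(ds, meta)                      # a new panel is created automatically
controller.add_dataset(ds2, meta, panel='panel 1')    # place it in an existing named panel
# passing group without panel raises CVUError('Cannot specify group without panel')
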
Example #3
def flip_adj_ord(adj, adjlabfile, labnam, ign_dels=False):
    import numpy as np
    if adjlabfile is None or adjlabfile == '':
        return adj
    init_ord, bads = read_ordering_file(adjlabfile)
    #delete the extras
    if not ign_dels:
        adj = np.delete(adj, bads, axis=0)
        adj = np.delete(adj, bads, axis=1)
    #if adj ordering is a different size than the new adjmat, we can't
    #possibly know how to fix it.  crash outright.
    if len(init_ord) != len(adj):
        raise CVUError(
            'The adjmat ordering file %s has %i entries '
            'after deletions, but the adjmat specified has %i regions.' %
            (adjlabfile, len(init_ord), len(adj)))
    adj_ord = adj_sort(init_ord, labnam)
    #drop the None entries: parcellation regions missing from the adjmat ordering
    ord_extras_rm = np.ma.masked_equal(adj_ord, None)
    adj_ord = np.array(ord_extras_rm.compressed(), dtype=int)
    #reindex both axes into the new order (equivalent to adj[adj_ord][:,adj_ord])
    adj = adj[np.ix_(adj_ord, adj_ord)]
    #warn about the omitted entries
    if len(adj_ord) != len(init_ord):
        for lab in init_ord:
            if lab not in labnam:
                print(
                    "Warning: Label %s present in adjmat ordering %s "
                    "was not in the current parcellation. It was omitted." %
                    (lab, adjlabfile))
    return adj
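
The actual reordering is done with np.ix_, which permutes rows and columns in one indexing operation; a small self-contained sketch of just that step (the matrix and ordering are made up):

import numpy as np

adj = np.array([[0, 1, 2],
                [1, 0, 3],
                [2, 3, 0]])
adj_ord = np.array([2, 0, 1])           # position i of the result takes old region adj_ord[i]
print(adj[np.ix_(adj_ord, adj_ord)])    # same result as adj[adj_ord][:, adj_ord]
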
Example #4
def calcparc(labels,
             labnam,
             quiet=False,
             parcname=' ',
             subjdir='.',
             subject='fsavg5',
             lhsurf=None,
             rhsurf=None):
    #subjdir and subject are passed here in order to get subcortical
    #structures from a brain other than fsavg5
    import numpy as np
    import os
    lab_pos = np.zeros((len(labnam), 3))
    #an nlogn sorting algorithm is theoretically possible here but rather hard
    labs_used = []
    labv = {}

    # collect the mean position and vertex list of each cortical label
    for lab in labels:
        mangled_name = parse.mangle_hemi(lab.name.lower())
        try:
            i = labnam.index(mangled_name)
        except ValueError:
            if not quiet:
                print("Label %s deleted as requested" % lab.name)
            continue
        labs_used.append(mangled_name)
        labv.update({lab.name.lower(): lab.vertices})
        lab_pos[i, :] = np.mean(lab.pos, axis=0)
    #the data seems to be incorrectly scaled by a factor of roughly 1000
    lab_pos *= 1000

    import volume
    valid_subcortical_keys = volume.aseg_rois.keys()
    asegd = None

    for i, lab in enumerate(labnam):
        if lab not in labs_used:
            if lab in valid_subcortical_keys:
                if asegd is None:
                    try:
                        import nibabel as nib
                    except ImportError as e:
                        raise CVUError(
                            'Nibabel is required for handling of '
                            'parcellations with subcortical structures')
                    aseg = nib.load(
                        os.path.join(subjdir, subject, 'mri', 'aseg.mgz'))
                    asegd = aseg.get_data()
                lab_pos[i, :] = volume.roi_coords(lab,
                                                  asegd,
                                                  subjdir=subjdir,
                                                  subject=subject,
                                                  lhsurf=lhsurf,
                                                  rhsurf=rhsurf)
            #let the user know if parc order file has unrecognized entries
            elif not quiet:
                print("Warning: Label %s not found in parcellation %s" %
                      (lab, parcname))

    return lab_pos, labv
Example #5
def adj_sort(adj_ord, desired_ord):
    if len(adj_ord) < len(desired_ord):
        raise CVUError(
            'Parcellation order is larger than adjmat order.  Parc '
            'ordering has %i (non-delete) entries and adjmat order has %i ' %
            (len(desired_ord), len(adj_ord)))
    keys = {k: i for i, k in enumerate(adj_ord)}
    #labels in desired_ord that are absent from adj_ord map to None
    return [keys.get(k) for k in desired_ord]
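
A quick illustration of adj_sort with made-up label lists; entries of the desired ordering that are absent from the adjmat ordering come back as None, which flip_adj_ord later masks out:

adjmat_order = ['lh_a', 'lh_b', 'rh_a', 'rh_b']
parc_order = ['rh_a', 'lh_a', 'lh_c']       # 'lh_c' is not in the adjmat ordering
print(adj_sort(adjmat_order, parc_order))   # [2, 0, None]
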
Example #6
def sh_cmd(cmd):
    import subprocess
    import os
    with open(os.devnull, 'wb') as devnull:
        try:
            #send the command's output to /dev/null rather than the console
            subprocess.check_call(
                cmd, stdout=devnull, stderr=subprocess.STDOUT,
                shell=True)
        except subprocess.CalledProcessError as e:
            raise CVUError(str(e))
Example #7
def calcparc_gifti(labnam, labv, surf_struct, quiet=False):
    import numpy as np
    # stack the left and right hemisphere vertex coordinates into one array
    vert = np.vstack((surf_struct[0], surf_struct[2]))

    nr_labels = len(labnam)
    nr_verts = len(labv)

    if nr_verts != len(vert):
        print(nr_verts)
        print(len(vert))
        raise CVUError('Parcellation has inconsistent number of vertices')
    if not quiet:
        print('Surface has ' + str(nr_verts) + ' vertices')
        print('Parcellation has ' + str(nr_labels) +
              ' labels (before bad channel'
              ' removal)')

    lab_pos = np.zeros((nr_labels, 3))

    ## CHECK FOR BAD CHANNELS AND DEFINE LABEL LOCATIONS AS VERTEX AVERAGES ##
    bad_labs = []
    deleters = []

    for i in range(nr_labels):
        if labnam[i] == 'delete':
            deleters.append(i)
            continue
        #indices of the vertices assigned to this label
        curlab = np.flatnonzero(np.array(list(map(eqfun(labnam[i]), labv))))
        if len(curlab) == 0:
            print("Warning: label " + labnam[i] +
                  ' has no vertices in it.  This '
                  'channel will be deleted')
            bad_labs.append(i)
            continue
        if not quiet:
            print("generating coordinates for " + labnam[i])
        lab_pos[i] = np.mean(vert[curlab], axis=0)

    ## DELETE THE BAD CHANNELS ##
    if len(deleters) > 0:
        print("Removed " + str(len(deleters)) + " bad channels")
        lab_pos = np.delete(lab_pos, deleters, axis=0)
        labnam = np.delete(labnam, deleters, axis=0)
        nr_labels -= len(deleters)
    else:
        print("No bad channels")

    if len(bad_labs) > 0:
        lab_pos = np.delete(lab_pos, bad_labs, axis=0)
        labnam = np.delete(labnam, bad_labs, axis=0)
        nr_labels -= len(bad_labs)

    return lab_pos
Example #8
    def rename_panel(self, old_name, new_name):
        try:
            panel = self.panel_instances[old_name]
            ds_meta = self.panel_metadatae[old_name]
        except KeyError:
            raise CVUError('No such panel')
        del self.panel_instances[old_name]
        del self.panel_metadatae[old_name]
        self.panel_instances.update({new_name: panel})
        self.panel_metadatae.update({new_name: ds_meta})
        panel.panel_name = new_name
Example #9
    def rename_dataset(self, old_name, new_name):
        try:
            ds = self.ds_instances[old_name]
            ds_meta = self.ds_metadatae[old_name]
        except KeyError:
            raise CVUError('No such dataset')
        del self.ds_instances[old_name]
        del self.ds_metadatae[old_name]
        self.ds_instances.update({new_name: ds})
        self.ds_metadatae.update({new_name: ds_meta})
        ds.name = new_name
Example #10
    def populate(self, ds, ds2=None, group=None, force=False):
        if ds2 is not None: grps = ('group_1', 'group_2')
        elif group == 1 or group is None: grps = ('group_1', )
        elif group == 2: grps = ('group_2', )
        elif self.is_full(): raise CVUError('Panel is full')
        else: raise ValueError('Cannot populate ViewPanel with group other than 1 or 2')

        if not force:
            for grp in grps:
                if getattr(self, grp) is not None:
                    raise CVUError('Group specified is full, overwrite with '
                                   'force=True')

        datasets = ((ds, ds2) if (ds2 is not None) else (ds, ))

        for grp, d in zip(grps, datasets):
            dvl = DatasetViewportLayout()
            dvl.mayavi_port = Viewport(ds=d)
            dvl.matrix_port = Viewport(ds=d)
            dvl.circle_port = Viewport(ds=d)
            setattr(self, grp, dvl)
Example #11
def loadmat(fname, field=None, is_adjmat=True):
    import numpy as np
    # already a well-formed numpy array/matrix; use it directly
    if isinstance(fname, (np.ndarray, np.matrix)):
        mat = fname

    # matlab
    elif fname.endswith('.mat'):
        if not field:
            raise CVUError("For .mat matrices, you must specify a field name")
        import scipy.io
        mat = scipy.io.loadmat(fname)[field]

    # numpy
    elif fname.endswith('.npy'):
        mat = np.load(fname)
    elif fname.endswith('.npz'):
        if not field:
            raise CVUError("For .npz matrices, you must specify a field name")
        mat = np.load(fname)[field]

    # other
    elif fname.endswith('.pkl'):
        raise IOError('Pickled matrices are not supported yet')
    elif fname.endswith('.txt'):
        mat = np.loadtxt(fname)
    else:
        raise IOError(
            'File type not understood.  Only supported matrix '
            'formats are matlab (.mat), numpy (.npy/.npz), and plain text '
            '(.txt).  File extensions are used to differentiate file '
            'formats and are not optional.')

    if is_adjmat:
        if mat.ndim != 2 or mat.shape[0] != mat.shape[1]:
            raise CVUError('Adjacency matrix is not square')
        if not np.allclose(mat, mat.T):
            raise CVUError('Adjacency matrix is not symmetric')

    return mat
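
A hedged usage sketch for loadmat; the file names and the field name below are placeholders, not files shipped with the project:

adj = loadmat('my_adjmat.npy')                       # .npy: loaded directly, must be square and symmetric
adj = loadmat('my_adjmat.mat', field='adj')          # .mat/.npz: a field name is required
coords = loadmat('my_coords.txt', is_adjmat=False)   # plain text matrix, skip the square/symmetric checks
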
Example #12
def sh_cmd_grep(cmd, grep):
    #this function is inspired by a similar function from connectomemapper
    import subprocess
    import os
    import random
    import time
    import tempfile
    t = random.randint(1, 10000000)
    try:
        os.mkdir(os.path.join(tempfile.gettempdir(), 'cvu'))
    except OSError:
        pass
    fname = os.path.join(tempfile.gettempdir(), "out_fifo_%s" % str(t))

    try:
        os.unlink(fname)
    except OSError:
        pass

    retln = []
    os.mkfifo(fname)
    try:
        fifo = os.fdopen(os.open(fname, os.O_RDONLY | os.O_NONBLOCK))
        newcmd = "( %s ) 1>%s" % (cmd, fname)
        process = subprocess.Popen(newcmd,
                                   shell=True,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)

        while process.returncode is None:
            time.sleep(.5)
            process.poll()
            try:
                ln = fifo.readline().strip()
            except (IOError, OSError):
                #nothing available yet on the non-blocking fifo
                continue
            if ln and grep in ln:
                retln.append(ln)
        rem = fifo.read()
        if rem:
            for ln in [ln for ln in rem.split('\n') if ln.strip()]:
                if grep in ln:
                    retln.append(ln)
        if process.returncode:
            raise CVUError('%s failed with error code %s' %
                           (cmd, process.returncode))
    finally:
        #don't return inside finally: that would swallow the CVUError above
        try:
            os.unlink(fname)
        except OSError:
            pass
    return retln
Example #13
def sh_cmd_retproc(cmd, debug=False):
    import subprocess
    import os
    with open(os.devnull, 'wb') as devnull:
        outfd = None if debug else devnull

        process = subprocess.Popen(cmd,
                                   shell=True,
                                   stdin=subprocess.PIPE,
                                   stdout=outfd,
                                   stderr=outfd)

        #checks to see if the specified command was bad
        if process.poll():
            process.kill()
            raise CVUError('%s failed with error code %s' %
                           (cmd, process.returncode))
        return process
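
The three shell helpers differ mainly in whether they block and what they return; a hypothetical comparison (the commands are placeholders, and sh_cmd_grep relies on a POSIX fifo, so it is Unix-only):

sh_cmd('mkdir -p /tmp/cvu_output')               # blocks; raises CVUError on a nonzero exit code
lines = sh_cmd_grep('ls -l /tmp', 'cvu')         # blocks; returns output lines containing 'cvu'
proc = sh_cmd_retproc('sleep 10', debug=True)    # returns a Popen handle immediately
proc.wait()                                      # the caller decides when to wait on it
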
Example #14
    def remove_dataset(self, ds_name):
        #remove the metadata elements associated with this dataset
        ds_meta = self.ds_metadatae[ds_name]
        panel_name = ds_meta.panel

        #dispose of the window if necessary
        panel = self.panel_instances[panel_name]
        panel.conditionally_dispose()

        ds = self.ds_instances[ds_name]
        self.gui.reset_controls(ds)

        try:
            del self.ds_instances[ds_name]
            del self.ds_metadatae[ds_name]
            del self.panel_instances[panel_name]
            del self.panel_metadatae[panel_name]
        except KeyError as e:
            raise CVUError('Inconsistent metadata')
Example #15
def cli_args(argv):
    import getopt; import os; import sys
    adjmat_location=None; parcellation_name=None; subject_name=None;
    subjects_dir=None; parcellation_order=None; adjmat_order=None;
    surface_type=None; field_name=None; max_edges=None; quiet=False;
    script=None

    #check for passed arguments
    try:
        opts,args=getopt.getopt(argv,'p:a:s:o:qd:hvf:',
            ["parc=","adjmat=","adj=","data=","datadir=",
            "surf=","order=","surf-type=","parcdir=",
            "help","field=","subjects-dir=","subject=",
            "max-edges=","max_edges=","adj-order=","adj_order=",
            "script="])
    except getopt.GetoptError as e:
        print("Argument %s" % str(e))
        usage()
    for opt,arg in opts:
        if opt in ["-p","--parc"]:
            parcellation_name = arg
        elif opt in ["-a","--adjmat","--adj"]:
            adjmat_location = arg
        elif opt in ["-d","--data","--datadir","--subjects-dir","--parcdir"]:
            subjects_dir = arg
        elif opt in ["-o","--order"]:
            parcellation_order = arg
        elif opt in ["--adj-order","--adj_order"]:
            adjmat_order = arg
        elif opt in ["-s","--surf","--surf-type"]:
            surface_type = arg
        elif opt in ["--subject"]:
            subject_name = arg
        elif opt in ["-q"]:
            quiet = True
        elif opt in ["-v"]:
            pass
        elif opt in ["-h","--help"]:
            usage()
            sys.exit(0)
        elif opt in ["-f","--field"]:
            field_name = arg
        elif opt in ["--max-edges","--max_edges"]:
            max_edges = arg
        elif opt in ["--script"]:
            script = arg

    #assign default values
    if subjects_dir is None:
        subjects_dir = os.path.dirname(os.path.abspath(__file__))
    if adjmat_location is None:
        adjmat_location = 'data/sample_data.npy'
    if parcellation_name is None:
        parcellation_name = 'sparc'
    if parcellation_order is None:
        if parcellation_name != 'sparc':
            raise CVUError('A text file containing channel names must be'
                ' supplied with your parcellation')
        else:
            #TODO export this design pattern for relative paths where necessary
            parcellation_order=os.path.join(subjects_dir,'orders','sparc.txt')
    if surface_type is None:
        surface_type='pial'
    if subject_name is None:
        subject_name='fsavg5'
    if max_edges is None:
        max_edges=20000
    if not os.path.isfile(parcellation_order):
        raise CVUError('Channel names file %s not found' % parcellation_order)
    if not os.path.isfile(adjmat_location):
        raise CVUError('Adjacency matrix file %s not found' % adjmat_location)
    if not os.path.isdir(subjects_dir):
        raise CVUError('SUBJECTS_DIR %s not found' % subjects_dir)
    if adjmat_order and not os.path.isfile(adjmat_order):
        raise CVUError('Adjacency matrix order file %s not found' % adjmat_order)

    return {'parc': parcellation_name, 'adjmat': adjmat_location,
        'subject': subject_name, 'subjdir': subjects_dir,
        'parcorder': parcellation_order, 'adjorder': adjmat_order,
        'surftype': surface_type, 'maxedges': max_edges,
        'field': field_name, 'quiet': quiet,
        'script': script}
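
A sketch of calling cli_args (the parcellation name and paths are hypothetical); it returns a plain dict of settings and raises CVUError if any referenced file is missing:

import sys

args = cli_args(sys.argv[1:])
# or with an explicit argument vector:
args = cli_args(['-p', 'myparc', '-o', 'orders/myparc.txt',
                 '-a', 'data/my_adjmat.npy', '-q'])
print(args['parc'], args['adjmat'], args['maxedges'])
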
Example #16
    def _get_named_dataset(self, ds_name):
        try:
            return self.ds_instances[ds_name]
        except KeyError:
            raise CVUError('No such dataset')
Example #17
    def _get_named_panel(self, panel_name):
        try:
            return self.panel_instances[panel_name]
        except KeyError:
            raise CVUError('No such panel')