Example #1
    def _check_trapping(self, inv_val):
        r"""
        Determine which pores and throats are trapped by invading phase.  This
        method is called by ``run`` if 'trapping' is set to True.
        """
        # Generate a list containing boolean values for throat state
        Tinvaded = self['throat.inv_Pc'] < sp.inf
        # Add residual throats, if any, to list of invaded throats
        Tinvaded = Tinvaded + self['throat.residual']
        # Invert logic to find defending throats
        Tdefended = ~Tinvaded
        [pclusters, tclusters] = self._net.find_clusters2(mask=Tdefended,
                                                          t_labels=True)
        # See which outlet pores remain uninvaded
        outlets = self['pore.outlets']*(self['pore.inv_Pc'] == sp.inf)
        # Identify clusters connected to remaining outlet sites
        def_clusters = sp.unique(pclusters[outlets])
        temp = sp.in1d(sp.unique(pclusters), def_clusters, invert=True)
        trapped_clusters = sp.unique(pclusters)[temp]
        trapped_clusters = trapped_clusters[trapped_clusters >= 0]

        # Find defending clusters NOT connected to the outlet pores
        pmask = np.in1d(pclusters, trapped_clusters)
        # Store current applied pressure in newly trapped pores
        pinds = (self['pore.trapped'] == sp.inf) * (pmask)
        self['pore.trapped'][pinds] = inv_val

        # Find throats on the trapped defending clusters
        tinds = self._net.find_neighbor_throats(pores=pinds,
                                                mode='intersection')
        self['throat.trapped'][tinds] = inv_val
        self['throat.entry_pressure'][tinds] = 1000000
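The trapping test above reduces to a set operation on cluster labels: any defending cluster that no longer touches an uninvaded outlet is trapped. A minimal sketch of that step on toy data (plain NumPy; the labels and outlet mask are made up, with -1 marking invaded sites):

import numpy as np

pclusters = np.array([-1, 0, 0, 1, 1, 2])           # defending-cluster label per pore
outlets = np.array([False, False, True, False, False, False])

def_clusters = np.unique(pclusters[outlets])        # clusters still reaching an outlet
all_clusters = np.unique(pclusters)
trapped = all_clusters[~np.in1d(all_clusters, def_clusters)]
trapped = trapped[trapped >= 0]                     # drop the invaded label -1
print(trapped)                                      # [1 2]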
Example #2
def main():
    args = getArguments(getParser())

    # prepare logger
    logger = Logger.getInstance()
    if args.debug:
        logger.setLevel(logging.DEBUG)
    elif args.verbose:
        logger.setLevel(logging.INFO)

    # check if output image exists
    if not args.force:
        if os.path.exists(args.output):
            logger.warning("The output image {} already exists. Exiting.".format(args.output))
            exit(-1)

    # load input image
    input_data, input_header = load(args.input)

    logger.debug("Old number of regions={}.".format(len(scipy.unique(input_data))))

    # cut and relabel along the required dimension
    logger.info("Cutting and relabeling...")
    dimensions = list(range(input_data.ndim))
    del dimensions[args.dimension]
    __split_along(input_data, dimensions)

    logger.debug("New number of regions={}.".format(len(scipy.unique(input_data))))

    # save result contour volume
    save(input_data, args.output, input_header, args.force)

    logger.info("Successfully terminated.")
Example #3
def batch_metrics(unit_list, threshold, t_ref, t_cen):
    ''' Run metrics on a batch of data.  Pass in units from the catalog.
    '''
    
    from scipy import unique
    
    samp_rate = 30000.
    n_samples = 30
    n_chans = 4
    
    # Find common Sessions
    sessions = unique([unit.session for unit in unit_list])
    
    for session in sessions:
        
        units = session.units
        tetrodes = unique([unit.tetrode for unit in units])
        
        for tetrode in tetrodes:
            data = load_spikes(session.path, tetrode,  samp_rate, n_samples, n_chans)
            f_p, f_n = metrics(data, threshold, t_ref, t_cen, session.duration)
            # Sometimes there is no cluster 0
            f_p.setdefault(1)
            f_n.setdefault(1)
            units = [ unit for unit in session.units if unit.tetrode == tetrode] 
            for unit in units:
                unit.falsePositive = f_p[unit.cluster]
                unit.falseNegative = f_n[unit.cluster]
Example #4
 def _do_outer_iteration_stage(self):
     #Generate curve from points
     for inv_val in self._inv_points:
         #Apply one applied pressure and determine invaded pores
         logger.info('Applying capillary pressure: '+str(inv_val))
         self._do_one_inner_iteration(inv_val)
     #Store results using networks' get/set method
     self['pore.inv_Pc'] = self._p_inv
     self['throat.inv_Pc'] = self._t_inv
     #Find invasion sequence values (to correspond with IP algorithm)
     self._p_seq = sp.searchsorted(sp.unique(self._p_inv),self._p_inv)
     self._t_seq = sp.searchsorted(sp.unique(self._t_inv),self._t_inv)
     self['pore.inv_seq'] = self._p_seq
     self['throat.inv_seq'] = self._t_seq
     #Calculate Saturations
     v_total = sp.sum(self._net['pore.volume'])+sp.sum(self._net['throat.volume'])
     sat = 0.
     self['pore.inv_sat'] = 1.
     self['throat.inv_sat'] = 1.
     for i in range(self._npts):
         inv_pores = sp.where(self._p_seq==i)[0]
         inv_throats = sp.where(self._t_seq==i)[0]
         new_sat = (sum(self._net['pore.volume'][inv_pores])+sum(self._net['throat.volume'][inv_throats]))/v_total
         sat += new_sat
         self['pore.inv_sat'][inv_pores] = sat
         self['throat.inv_sat'][inv_throats] = sat
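The `searchsorted(unique(x), x)` idiom that builds the invasion sequence assigns each value its dense rank, so pores invaded at the same pressure share one sequence number. A standalone sketch with made-up pressures:

import numpy as np

p_inv = np.array([3000.0, 1000.0, 3000.0, 2000.0])
p_seq = np.searchsorted(np.unique(p_inv), p_inv)
print(p_seq)  # [2 0 2 1]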
Example #5
 def _read_sky_logfile(self):
     #TODO : expand to read errors, msgs etc
     # read in the whole sky log file, shouldn't be big
     f = open(self.skylogfile)
     lines = f.readlines()
     f.close()
     dust = [line.split()[1:] for line in lines if line.startswith('dtau_dust')]
     line = [line.split()[1:] for line in lines if line.startswith('dtau_line')]
     dust = _sp.array(dust, dtype='float')
     line = _sp.array(line, dtype='float')
     transitions = _sp.unique(dust[:,0])
     shells = _sp.unique(dust[:,1])
     dtau_dust = dict()
     dtau_line = dict()
     dtau_tot = dict()
     for t in transitions:
         d = []
         l = []
         for s in shells:
             d.append( _sp.mean([i[2] for i in dust if ((i[0]==t) * (i[1]==s))]) )
             l.append( _sp.mean([i[2] for i in line if ((i[0]==t) * (i[1]==s))]) )
         dtau_dust[t] = _sp.copy(d)
         dtau_line[t] = _sp.copy(l)
         dtau_tot[t] = _sp.array(d) + _sp.array(l)
     # create object to store in main class
     class Tau(object): pass
     Tau.dtau_dust = dtau_dust
     Tau.dtau_line = dtau_line
     Tau.dtau_tot = dtau_tot
     Tau.transitions = transitions
     Tau.shells = shells
     self.Tau = Tau
Example #6
def __compute_affiliation(label_image, mask_image, bounding_boxes):
    """
    Computes which regions of the supplied label_image belong to the mask_image's
    foreground and which to its background. When a region belongs to both, it is
    assigned to the foreground if more of its voxels lie in the foreground than in
    the background, and vice versa.
    In the case of equal affiliation, the region is assigned to the background.
    @return fg_ids, bg_ids
    """
    # simple extraction
    fg_ids = list(scipy.unique(label_image[mask_image]))
    bg_ids = list(scipy.unique(label_image[~mask_image]))
    # for regions present in both, decide by majority of voxels whether they belong to fg or bg
    for rid in set(fg_ids) & set(bg_ids):
        relevant_region_label_image = label_image[bounding_boxes[rid - 1]]
        relevant_region_mask_image = mask_image[bounding_boxes[rid - 1]]
        fg_part = 0
        bg_part = 0
        for affiliation, rid2 in zip(relevant_region_mask_image.ravel(), relevant_region_label_image.ravel()):
            if rid2 == rid:
                if affiliation: fg_part += 1
                else: bg_part += 1
        #fg_part = relevant_region_label_image[relevant_region_mask_image]
        #bg_part = relevant_region_label_image[~relevant_region_mask_image]
        if fg_part > bg_part: # if more voxels of region rid in fg than in bg
            bg_ids.remove(rid)
        else:
            fg_ids.remove(rid)
    # debug line, can be removed if the above code is final
    if 0 != len(set(fg_ids) & set(bg_ids)): raise Exception('Error making fg and bg ground truth distinct.') 
    return fg_ids, bg_ids
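The voxel-counting loop is a per-region majority vote. A compact sketch of the same decision for a single overlapping region id (toy arrays, not the author's data):

import numpy as np

labels = np.array([2, 2, 2, 1])                  # region id per voxel
mask = np.array([True, True, False, False])      # foreground mask
rid = 2
fg_part = np.count_nonzero(mask[labels == rid])   # 2 voxels in the foreground
bg_part = np.count_nonzero(~mask[labels == rid])  # 1 voxel in the background
in_fg = fg_part > bg_part                         # True; ties would go to the background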
Example #7
def Capillary_Pressure_Curve(net,
                             fluid,
                             capillary_pressure='capillary_pressure',
                             pore_volume='volume',
                             throat_volume='volume',
                             fig=None):
  r"""
  Plot drainage capillary pressure curve

  Parameters
  ----------
  net : OpenPNM Network Object
      The network for which the graphs are desired
  fig : Matplotlib figure object
      Canvas on which to draw plots

  """
  if type(fluid)==str: fluid = net.find_object_by_name(fluid)
  try:
    PcPoints = sp.unique(fluid.get_throat_data(prop=capillary_pressure))
  except KeyError:
    raise Exception('Capillary pressure simulation has not been run')

  Snwp = sp.zeros_like(PcPoints)
  Ps = sp.r_[0:net.num_pores('internal')]
  for i in range(1,sp.size(PcPoints)):
      Pc = PcPoints[i]
      Snwp[i] = sum((fluid.get_throat_data(prop=capillary_pressure)[Ps]<Pc)*(net.get_throat_data(prop=throat_volume)[Ps]))/sum(net.get_throat_data(prop=throat_volume)[Ps])
  
  if fig is None: fig = plt.figure()
  ax = fig.add_subplot(111)
  ax.plot(PcPoints,Snwp,'r.-')
  ax.set_xlabel('Capillary Pressure')
  ax.set_ylabel('Fluid Saturation')
Example #8
    def test_set_boundary_conditions_bctypes(self):
        self.alg.setup(invading_phase=self.water,
                       defending_phase=self.air,
                       trapping=True)
        Ps = sp.random.randint(0, self.net.Np, 10)

        self.alg.set_boundary_conditions(pores=Ps, bc_type='inlets')
        assert sp.sum(self.alg['pore.inlets']) == sp.size(sp.unique(Ps))
        self.alg['pore.inlets'] = False

        self.alg.set_boundary_conditions(pores=Ps, bc_type='outlets')
        assert sp.sum(self.alg['pore.outlets']) == sp.size(sp.unique(Ps))
        self.alg['pore.outlets'] = False

        self.alg.set_boundary_conditions(pores=Ps, bc_type='residual')
        assert sp.sum(self.alg['pore.residual']) == sp.size(sp.unique(Ps))
        self.alg['pore.residual'] = False

        flag = False
        try:
            self.alg.set_boundary_conditions(pores=Ps, bc_type='bad_type')
        except:
            flag = True
        assert flag

        flag = False
        try:
            self.alg.set_boundary_conditions(bc_type=None, mode='bad_type')
        except:
            flag = True
        assert flag
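The assertions hold because assigning True at repeated indices collapses duplicates, so the number of flagged pores equals the number of unique indices. A sketch of that invariant, assuming a hypothetical 100-pore network:

import numpy as np

Ps = np.random.randint(0, 100, 10)   # random pore indices, possibly repeated
inlets = np.zeros(100, dtype=bool)
inlets[Ps] = True                    # duplicates collapse into a single True
assert inlets.sum() == len(np.unique(Ps))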
Example #9
def filter_introns(introns, genes, options):
    
    ### build interval trees of all genes starts and ends
    chrms = sp.array([_.chr for _ in genes])
    strands = sp.array([_.strand for _ in genes])
    gene_trees = dict()
    for c in sp.unique(chrms):
        for s in sp.unique(strands):
            gene_trees[(c, s)] = it.IntervalTree()
            c_idx = sp.where((chrms == c) & (strands == s))[0]
            for i in c_idx:
                gene_trees[(c, s)][genes[i].start:genes[i].stop] = i

    ### match all introns agains trees and remove elements overlapping
    ### more than one gene on the same chr/strand
    cnt_tot = 0
    cnt_rem = 0
    strand_list = ['+', '-']
    offset = options.intron_edges['append_new_terminal_exons_len']
    for si, s in enumerate(strand_list):
        for i in range(introns.shape[0]):
            if introns[i, si].shape[0] == 0:
                continue
            k_idx = []
            cnt_tot += introns[i, si].shape[0]
            for j in range(introns[i, si].shape[0]):
                if len(gene_trees[(genes[i].chr, s)].overlap(introns[i, si][j, 0] - offset, introns[i, si][j, 1] + offset)) == 1:
                    k_idx.append(j)
            if len(k_idx) < introns[i, si].shape[0]:
                cnt_rem += (introns[i, si].shape[0] - len(k_idx))
                introns[i, si] = introns[i, si][k_idx, :]
    print('removed %i of %i (%.2f percent) introns overlapping to no or multiple genes' % (cnt_rem, cnt_tot, cnt_rem / float(max(cnt_tot, 1)) * 100))

    return introns
Example #10
def test_linear_solvers():
    pn = OpenPNM.Network.Cubic([1, 40, 30], spacing=0.0001)
    geom = OpenPNM.Geometry.Toray090(network=pn,
                                     pores=pn.pores(),
                                     throats=pn.throats())
    air = OpenPNM.Phases.Air(network=pn)
    phys_air = OpenPNM.Physics.Standard(network=pn,
                                        phase=air,
                                        pores=pn.pores(),
                                        throats=pn.throats())

    BC1_pores = pn.pores(labels=['left'])
    BC2_pores = pn.pores(labels=['right'])

    alg_1 = OpenPNM.Algorithms.FickianDiffusion(network=pn, phase=air)
    alg_1.set_boundary_conditions(bctype='Dirichlet',
                                  bcvalue=1,
                                  pores=BC1_pores)
    alg_1.set_boundary_conditions(bctype='Dirichlet',
                                  bcvalue=0,
                                  pores=BC2_pores)
    alg_1.run(iterative_solver='gmres')

    alg_2 = OpenPNM.Algorithms.FickianDiffusion(network=pn, phase=air)
    alg_2.set_boundary_conditions(bctype='Neumann',
                                  bcvalue=-1e-11,
                                  pores=BC1_pores)
    alg_2.set_boundary_conditions(bctype='Dirichlet',
                                  bcvalue=0,
                                  pores=BC2_pores)
    alg_2.run(iterative_solver='cg')

    alg_3 = OpenPNM.Algorithms.FickianDiffusion(network=pn, phase=air)
    alg_3.set_boundary_conditions(bctype='Neumann_group',
                                  bcvalue=-3e-10,
                                  pores=BC1_pores)
    alg_3.set_boundary_conditions(bctype='Dirichlet',
                                  bcvalue=0,
                                  pores=BC2_pores)
    alg_3.run()

    alg_4 = OpenPNM.Algorithms.FickianDiffusion(network=pn, phase=air)
    alg_4.set_boundary_conditions(bctype='Neumann_group',
                                  bcvalue=-3e-10,
                                  pores=BC1_pores)
    alg_4.set_boundary_conditions(bctype='Dirichlet',
                                  bcvalue=0,
                                  pores=BC2_pores)
    alg_4.setup()
    alg_4.solve()

    assert round(sp.absolute(alg_1.rate(BC1_pores))[0], 16) == round(sp.absolute(alg_1.rate(BC2_pores))[0], 16)
    assert round(sp.absolute(alg_2.rate(BC2_pores))[0], 16) == round(sp.absolute(sp.unique(alg_2['pore.'+air.name+'_bcval_Neumann']))[0]*len(BC1_pores), 16)
    assert round(sp.absolute(alg_3.rate(BC2_pores))[0], 16) == round(sp.absolute(sp.unique(alg_3['pore.'+air.name+'_bcval_Neumann_group']))[0], 16)
    assert round(sp.absolute(alg_4.rate(BC2_pores))[0], 16) == round(sp.absolute(sp.unique(alg_4['pore.'+air.name+'_bcval_Neumann_group']))[0], 16)

    assert round(sp.absolute(sp.sum(alg_1.rate(BC1_pores,mode='single'))),16) == round(sp.absolute(alg_1.rate(BC1_pores))[0],16)
    assert round(sp.absolute(sp.sum(alg_2.rate(BC2_pores,mode='single'))),16) == round(sp.absolute(alg_2.rate(BC2_pores))[0],16)
    assert round(sp.absolute(sp.sum(alg_3.rate(BC2_pores,mode='single'))),16) == round(sp.absolute(alg_3.rate(BC2_pores))[0],16)
    assert round(sp.absolute(sp.sum(alg_4.rate(BC2_pores,mode='single'))),16) == round(sp.absolute(alg_4.rate(BC2_pores))[0],16)
Example #11
def fit_dispersion(counts, disp_raw, disp_conv, sf, CFG, dmatrix1):

    mean_count = sp.mean(counts / sf, axis=1)[:, sp.newaxis]
    index = sp.where(disp_conv)[0]

    lowerBound = sp.percentile(sp.unique(disp_raw[index]), 1)
    upperBound = sp.percentile(sp.unique(disp_raw[index]), 99)

    idx = sp.where((disp_raw > lowerBound) & (disp_raw < upperBound))[0]

    matrix = sp.ones((idx.shape[0], 2), dtype='float')
    matrix[:, 0] /= mean_count[idx].ravel()

    modGamma = sm.GLM(disp_raw[idx], matrix, family=sm.families.Gamma(sm.families.links.identity))
    res = modGamma.fit()
    Lambda = res.params

    disp_fitted = disp_raw.copy()
    ok_idx = sp.where(~sp.isnan(disp_fitted))[0]
    disp_fitted[ok_idx] = Lambda[0] / mean_count[ok_idx] + Lambda[1]

    if sp.sum(disp_fitted > 0) > 0:
        print "Found dispersion fit"

    if CFG['diagnose_plots']:
        plot.mean_variance_plot(counts=counts,
                                disp=disp_fitted,
                                matrix=dmatrix1,
                                figtitle='Fitted Dispersion Estimate',
                                filename=os.path.join(CFG['plot_dir'], 'dispersion_fitted.pdf'),
                                CFG=CFG)

    return (disp_fitted, Lambda, idx)
Example #12
 def plot_setup(p):
     pylab.ylabel("Throughput gain [\%]")
     pylab.xscale('log', basex=2)
     pylab.xticks(
         list(scipy.unique(group['symbols'])),
         list(scipy.unique(group['symbols'])))
     plotter.set_markers(p)
     plotter.set_slave_info(slavename)
Example #13
    def find_neighbor_throats(self,pnums,flatten=True,mode='union'):
        r"""
        Returns a list of throats neighboring the given pore(s)

        Parameters
        ----------
        pnums : array_like
            Indices of pores whose neighbors are sought
        flatten : boolean, optional
            If flatten is True (default) a 1D array of unique throat ID numbers
            is returned. If flatten is False the returned array contains arrays
            of neighboring throat ID numbers for each input pore, in the order
            they were sent.
        mode : string, optional
            Specifies which neighbors should be returned.  The options are: 
            
            * 'union' : All neighbors of the input pores

            * 'intersection' : Only neighbors shared by all input pores 
            
            * 'not_intersection' : Only neighbors not shared by any input pores

        Returns
        -------
        neighborTs : 1D array (if flatten is True) or ndarray of arrays (if
            flatten is False)

        Examples
        --------
        >>> pn = OpenPNM.Network.Cubic(name='doc_test').generate(divisions=[5,5,5],lattice_spacing=[1])
        >>> pn.find_neighbor_throats(pnums=[0,1])
        array([0, 1, 2, 3, 4, 5])
        >>> pn.find_neighbor_throats(pnums=[0,1],flatten=False)
        array([array([0, 1, 2]), array([0, 3, 4, 5])], dtype=object)
        """
        #Test for existence of incidence matrix
        try:
            neighborTs = self.incidence_matrix['lil']['connections'].rows[[pnums]]
        except:
            self._logger.info('Creating incidence matrix, please wait')
            self.create_incidence_matrix()
            neighborTs = self.incidence_matrix['lil']['connections'].rows[[pnums]]
        if flatten:
            #All the empty lists must be removed to maintain data type after hstack (numpy bug?)
            neighborTs = [sp.asarray(x) for x in neighborTs if x]
            neighborTs = sp.hstack(neighborTs)
            #Remove references to input pores and duplicates
            if mode == 'not_intersection':
                neighborTs = sp.unique(sp.where(sp.bincount(neighborTs)==1)[0])
            elif mode == 'union':
                neighborTs = sp.unique(neighborTs)
            elif mode == 'intersection':
                neighborTs = sp.unique(sp.where(sp.bincount(neighborTs)>1)[0])
        else:
            for i in range(0,sp.size(pnums)):
                neighborTs[i] = sp.array(neighborTs[i])
        return sp.array(neighborTs,ndmin=1)
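All three modes fall out of a single `bincount` over the stacked neighbor lists: a count of 1 means a throat touches exactly one input pore, a count above 1 means it is shared. A standalone sketch with toy throat numbers:

import numpy as np

neighborTs = np.array([0, 1, 2, 0, 3, 4, 5])  # throats of pores 0 and 1, stacked
counts = np.bincount(neighborTs)
union = np.unique(neighborTs)                 # [0 1 2 3 4 5]
intersection = np.where(counts > 1)[0]        # [0] -- shared by both pores
not_intersection = np.where(counts == 1)[0]   # [1 2 3 4 5]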
Example #14
 def test_no_late_filling(self):
     mip = op.algorithms.Porosimetry(network=self.net)
     mip.setup(phase=self.hg)
     mip.set_inlets(pores=self.net.pores('left'))
     mip.run()
     assert len(sp.unique(mip['pore.invasion_pressure'])) > 1
     assert len(sp.unique(mip['pore.invasion_sequence'])) > 1
     assert len(sp.unique(mip['throat.invasion_pressure'])) > 1
     assert len(sp.unique(mip['throat.invasion_sequence'])) > 1
Example #15
 def plot_setup(p):
     pylab.ylabel("Throughput" + " [" + list(group['unit'])[0] + "]")
     pylab.yscale('log')
     pylab.xscale('log', basex=2)
     pylab.xticks(
         list(scipy.unique(group['symbols'])),
         list(scipy.unique(group['symbols'])))
     plotter.set_markers(p)
     plotter.set_legend_columns(3)
Example #16
    def evaluate_trapping(self, p_outlets):
        r"""
        Finds trapped pores and throats after a full ordinary
        percolation simulation has been run.

        Parameters
        ----------
        p_outlets : array_like
            A list of pores that define the wetting phase outlets.
            Disconnection from these outlets results in trapping.

        Returns
        -------
        It creates arrays called ``pore.trapped`` and ``throat.trapped``, but
        also adjusts the ``pore.inv_Pc`` and ``throat.inv_Pc`` arrays to set
        trapped locations to have infinite invasion pressure.

        """
        self['pore.trapped'] = sp.zeros([self.Np, ], dtype=float)
        self['throat.trapped'] = sp.zeros([self.Nt, ], dtype=float)
        try:
            # Get points used in OP
            inv_points = sp.unique(self['pore.inv_Pc'])
        except:
            raise Exception('Ordinary percolation has not been run!')
        tind = self._net.throats()
        conns = self._net.find_connected_pores(tind)
        for inv_val in inv_points[0:-1]:
            # Find clusters of defender pores
            Pinvaded = self['pore.inv_Pc'] <= inv_val
            Cstate = sp.sum(Pinvaded[conns], axis=1)
            Tinvaded = self['throat.inv_Pc'] <= inv_val
            # 0 = all open, 1 = 1 pore filled,
            # 2 = 2 pores filled, 3 = 2 pores + 1 throat filled
            Cstate = Cstate + Tinvaded
            clusters = self._net.find_clusters(Cstate == 0)
            # Clean up clusters (invaded = -1, defended >=0)
            clusters = clusters * (~Pinvaded) - (Pinvaded)
            # Identify clusters connected to outlet sites
            out_clusters = sp.unique(clusters[p_outlets])
            trapped_pores = ~sp.in1d(clusters, out_clusters)
            trapped_pores[Pinvaded] = False
            if sum(trapped_pores) > 0:
                inds = (self['pore.trapped'] == 0) * trapped_pores
                self['pore.trapped'][inds] = inv_val
                trapped_throats = self._net.find_neighbor_throats(trapped_pores)
                trapped_throat_array = np.asarray([False] * len(Cstate))
                trapped_throat_array[trapped_throats] = True
                inds = (self['throat.trapped'] == 0) * trapped_throat_array
                self['throat.trapped'][inds] = inv_val
                inds = (self['throat.trapped'] == 0) * (Cstate == 2)
                self['throat.trapped'][inds] = inv_val
        self['pore.trapped'][self['pore.trapped'] > 0] = sp.inf
        self['throat.trapped'][self['throat.trapped'] > 0] = sp.inf
        self['pore.inv_Pc'][self['pore.trapped'] > 0] = sp.inf
        self['throat.inv_Pc'][self['throat.trapped'] > 0] = sp.inf
Example #17
    def get_scan_IF_inds(self, scan_ind, IF_ind) :
        """Gets the record indices of the fits file that correspond to the
        given scan and IF.

        Note that the scans are numbered with 0 corresponding to the first scan
        in the file, i.e., it is not the session scan number."""

        # TODO: Should check valid scan IF, and raise value errors as appropriate
        thescan = self.scan_set[scan_ind]
        theIF = self.IF_set[IF_ind]
        
        # Find all the records that correspond to this IF and this scan.
        # These indices should be ordered in time, cal (on/off),
        # and polarization, once the IF is isolated.
        (inds_sif,) = sp.where(sp.logical_and(self._IFs_all==theIF, 
                                        self._scans_all==thescan))
        ncal = len(sp.unique(self.fitsdata.field('CAL')[inds_sif]))
        npol = len(sp.unique(self.fitsdata.field('CRVAL4')[inds_sif]))
        
        # Reform to organize by pol, cal, etc.
        ntimes = len(inds_sif)//npol//ncal
        inds_sif = sp.reshape(inds_sif, (ntimes, npol, ncal))

        if self.verify_ordering > 0:
            # We expect noise cal to be on for every second record.
            for thecal in range(ncal) :
                tmp = sp.unique(self.fitsdata.field('CAL')[inds_sif[:,:,thecal]])
                if len(tmp) > 1 :
                    raise ce.DataError("Calibration (ON/OFF) not in "
                                    "perfect order in file: "+self.fname)
            # Polarization should cycle through 4 modes (-5,-7,-8,-6)
            for thepol in range(npol) :
                tmp = sp.unique(self.fitsdata.field('CRVAL4')
                            [inds_sif[:,thepol,:]])
                if len(tmp) > 1 :
                    raise ce.DataError("Polarizations not in perfect order in "
                                    "file: "+self.fname)
            # We expect the entries to be sorted in time and for time to not
            # change across pol and cal.
            lastLST = 0
            for ii in range(ntimes) :
                # Sometimes won't have the LST.
                try :
                    thisLST = self.fitsdata.field('LST')[inds_sif[ii,0,0]]
                # If 'LST' is missing raises a KeyError in later versions of
                # pyfits, and a NameError in earlier ones.
                except (KeyError, NameError) :
                    break
                if not (sp.allclose(self.fitsdata.field('LST')
                        [inds_sif[ii,:,:]] - thisLST, 0)) :
                    raise ce.DataError("LST change across cal or pol in "
                                       "file: " + self.fname)

        return inds_sif
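The reshape works because `len(sp.unique(...))` recovers how many cal and pol levels are interleaved in the record list, and the time axis is whatever remains. A toy sketch with invented CAL/CRVAL4 columns:

import numpy as np

cal = np.array([1, 0, 1, 0, 1, 0, 1, 0])          # noise cal on/off per record
pol = np.array([-5, -5, -6, -6, -5, -5, -6, -6])  # polarization code per record
ncal = len(np.unique(cal))                        # 2
npol = len(np.unique(pol))                        # 2
inds = np.arange(len(cal))
inds = inds.reshape(len(inds) // npol // ncal, npol, ncal)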
Example #18
def centre_of_mass(geometry, vertices='throat.offset_vertices', **kwargs):
    r"""
    Calculate the centre of mass of the throat from the voronoi vertices.
    """
    Nt = geometry.num_throats()
    outer_verts = geometry['throat.vertices']
    offset_verts = geometry[vertices]
    normal = geometry['throat.normal']
    z_axis = [0, 0, 1]
    value = _sp.ndarray([Nt, 3])
    for i in range(Nt):
        if len(offset_verts[i]) > 2:
            verts = offset_verts[i]
        elif len(outer_verts[i]) > 2:
            verts = outer_verts[i]
        else:
            verts = []
        if len(verts) > 0:
            # For boundaries some facets will already be aligned with the axis -
            # if this is the case a rotation is unnecessary and could also cause
            # problems
            angle = tr.angle_between_vectors(normal[i], z_axis)
            if angle == 0.0 or angle == _sp.pi:
                "We are already aligned"
                rotate_input = False
                facet = verts
            else:
                rotate_input = True
                M = tr.rotation_matrix(tr.angle_between_vectors(normal[i], z_axis),
                                       tr.vector_product(normal[i], z_axis))
                facet = _sp.dot(verts, M[:3, :3].T)
            # Now we have a rotated facet aligned with the z axis - make 2D
            facet_2D = _sp.column_stack((facet[:, 0], facet[:, 1]))
            z = _sp.unique(_sp.around(facet[:, 2], 10))
            if len(z) == 1:
                # We need the vertices arranged in order so perform a convex hull
                hull = ConvexHull(facet_2D)
                ordered_facet_2D = facet_2D[hull.vertices]
                # Call the routine to calculate an area-weighted centroid from the
                # 2D polygon
                COM_2D = vo.PolyWeightedCentroid2D(ordered_facet_2D)
                COM_3D = _sp.hstack((COM_2D, z))
                # If we performed a rotation we need to rotate back
                if (rotate_input):
                    MI = tr.inverse_matrix(M)
                    # Unrotate the offset coordinates using the inverse of the
                    # original rotation matrix
                    value[i] = _sp.dot(COM_3D, MI[:3, :3].T)
                else:
                    value[i] = COM_3D
            else:
                print('Rotation Failed: ' + str(_sp.unique(facet[:, 2])))

    return value
Example #19
    def run(self, npts=25, inv_pressures=None):
        r"""
        Run the algorithm for specified number of points or at given capillary
        pressures.

        Parameters
        ----------
        npts : scalar
            The number of points to obtain on the curve.  The points are
            automatically selected to span the range of capillary pressures
            using a logarithmic spacing (with more points at lower capillary
            pressure values).

        inv_pressures : array_like
            A list of capillary pressures to apply. List should contain
            increasing and unique values.
        """
        # If no invasion points are given then generate some
        if inv_pressures is None:
            logger.info('Generating list of invasion pressures')
            min_p = sp.amin(self['throat.entry_pressure']) * 0.98  # nudge down
            max_p = sp.amax(self['throat.entry_pressure']) * 1.02  # bump up
            inv_points = sp.logspace(sp.log10(min_p),
                                     sp.log10(max_p),
                                     npts)
        else:
            # Make sure the given invasion points are sensible
            inv_points = sp.unique(inv_pressures)
        self._inv_points = inv_points

        # Ensure inlets are set
        if sp.sum(self['pore.inlets']) == 0:
            raise Exception('Inlet pores have not been specified')

        # Ensure outlet pores are set if trapping is enabled
        if self._trapping:
            if sp.sum(self['pore.outlets']) == 0:
                raise Exception('Outlet pores have not been specified')

        # Generate curve from points
        for inv_val in self._inv_points:
            # Apply one applied pressure and determine invaded pores
            logger.info('Applying capillary pressure: ' + str(inv_val))
            self._apply_percolation(inv_val)
            if self._trapping:
                logger.info('Checking for trapping')
                self._check_trapping(inv_val)

        # Find invasion sequence values (to correspond with IP algorithm)
        Pinv = self['pore.inv_Pc']
        self['pore.inv_seq'] = sp.searchsorted(sp.unique(Pinv), Pinv)
        Tinv = self['throat.inv_Pc']
        self['throat.inv_seq'] = sp.searchsorted(sp.unique(Tinv), Tinv)
Example #20
def make_plot_twoclass(X,Y,W,kernel):
    fig = plt.figure(figsize=(5,4))
    fig.clf()
    colors = "brymcwg"

    # Plot the decision boundary.
    h = .2 # stepsize in mesh
    x_min, x_max = X[0,:].min() - 1, X[0,:].max() + 1
    y_min, y_max = X[1,:].min() - 1, X[1,:].max() + 1
    xx, yy = sp.meshgrid(sp.arange(x_min, x_max, h),
            sp.arange(y_min, y_max, h))

    Z = predict_svm_kernel(sp.c_[sp.ones(xx.ravel().shape[-1]), xx.ravel(), yy.ravel()].T,sp.vstack((sp.ones((1,X.shape[-1])),X)),W,kernel).reshape(xx.shape)
    cs = plt.contourf(xx, yy, Z,alpha=.5)
    plt.axis('tight')
    plt.colorbar()
    plt.axis('equal')
    y = sp.maximum(0,-Y)+1
    # plot the data
    plt.hold(True)

    ypred = W.T.dot(kernel[0](X,X,kernel[1]).T)
    for ic in sp.unique(y):
        idx = (y == int(ic)).flatten()
        sv = (Y.flatten()[idx]*ypred[idx] < 1)
        plt.plot(X[0,idx.nonzero()[0][sv]], X[1,idx.nonzero()[0][sv]], colors[int(ic)]+'o',markersize=13)
        plt.plot(X[0,idx.nonzero()[0][~sv]], X[1,idx.nonzero()[0][~sv]], colors[int(ic)]+'o',markersize=7)
    plt.axis('tight')

    plt.xlabel('$X_1$')
    plt.ylabel('$X_2$')

    #plt.title('SVM, Accuracy=%0.2f'%(Y==sp.sign(ypred)).mean())

    #plt.show()
    plt.savefig('./svm_kernel.pdf')

    fig = plt.figure(figsize=(5,5))
    fig.clf()
    colors = "brymcwg"
    for ic in sp.unique(y):
        idx = (y == int(ic)).flatten()
        plt.plot(X[0,idx], X[1,idx], colors[int(ic)]+'o',markersize=8)
    plt.axis('tight')

    plt.xlabel('$X_1$')
    plt.ylabel('$X_2$')
    plt.xlim((x_min,x_max))
    plt.ylim((y_min,y_max))
    plt.grid()
    #plt.show()
    plt.savefig('./svm_kernel_xor_data.pdf')
Example #21
    def add_periodic_connections(self, pores1, pores2, apply_label='periodic'):
        r"""
        Accepts two sets of pores and connects them with new throats.  The
        connections are determined by pairing each pore in ``pores1`` with its
        nearest pore in ``pores2``.  For cubic Networks this will create
        pairings with pores directly across the domain from each other,
        assuming the input pores are 2D co-planar sets of pores.

        Parameters
        ----------
        pores1 and pores2 : array_like
            Lists of pores on the opposing faces which are to be linked to
            create periodicity.

        apply_label : string
            The label to apply to the newly created throats.  The default is
            'periodic'.

        Notes
        -----
        This method will raise an exception if the input pores do not create
        fully unique pairs.  Specifically, the length of pores1 and pores2
        must be the same AND each pore in pores1 must pair up with one and
        only one pore in pores2, and vice versa.  If these conditions are
        not met then periodicity cannot be achieved, and an exception is
        raised.

        """
        logger.debug('Creating periodic pores')
        if sp.shape(pores1)[0] != sp.shape(pores2)[0]:
            raise Exception('Unequal length inputs, periodicity not possible')
        p1 = self['pore.coords'][pores1]
        p2 = self['pore.coords'][pores2]
        dist_mat = sptl.distance_matrix(p1, p2)
        dist_min = sp.amin(dist_mat, axis=1, keepdims=True)
        [a, b] = sp.where(dist_mat == dist_min)
        pairs = sp.vstack([pores1[a], pores2[b]]).T
        # Confirm that each pore in each list is only paired up once
        temp_1 = sp.unique(pairs[:, 0])
        if sp.shape(temp_1) < sp.shape(pores1):
            raise Exception('Non-unique pairs found, periodicity not met')
        temp_2 = sp.unique(pairs[:, 1])
        if sp.shape(temp_2) < sp.shape(pores2):
            raise Exception('Non-unique pairs found, periodicity not met')
        # Add throats to the network for the periodic connections
        self.extend(throat_conns=pairs, labels=apply_label)
        # Record which pore is connected to which across the periodic boundary
        self['pore.periodic_neighbor'] = sp.nan
        self['pore.periodic_neighbor'][pairs[:, 0]] = pairs[:, 1]
        self['pore.periodic_neighbor'][pairs[:, 1]] = pairs[:, 0]
        logger.info('Periodic boundary pores added successfully')
Example #22
def convertHDF5_2_VCF(arguments):
    if not os.path.isfile(arguments.hdata):
        print "Argument --hdata " + arguments.hdata + " is not a file\n"
        quit()
    if os.path.isfile(arguments.vout):
        print "File in --vout " + arguments.vout + " already exists. Please specify a different file!\n"
        quit()
    f = h5py.File(arguments.hdata,'r')
    raw = f['Genotype/raw'][:]
    positions = f['Genotype']['position_index'][:]
    chromosomes = f['Genotype']['chr_index'][:]
    identifiers = f['Genotype']['identifiers'][:]
    major_ref = False
    if "ref_allele" in f['Genotype'].keys():
        ref_allele = f['Genotype']['ref_allele'][:]
    else:
        major_ref = True
        print "No reference allele list in HDF5 file! Reference allele is set to the major allele!"
    f.close()

    out = open(arguments.vout,'w')
    snps_one_allele = 0
    snps_more_alleles = 0
    out.write("##Hyrbid VCF\n")
    out.write("#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n")
    for i in range(raw.shape[1]):
        stmp = sp.unique(raw[:,i])
        snps = []
        for s in stmp:
            nucs = iupac_map_reverse[s]
            for nuc in nucs:
                snps.append(nuc)
        snps = sp.array(snps)
        snps = sp.unique(snps)
        if snps.shape[0]<2:
            snps_one_allele += 1
            continue
        if snps.shape[0]>=3:
            snps_more_alleles += 1
            continue
        if major_ref==False:
            ind = sp.where(snps!=ref_allele[i])[0]
            out.write(str(chromosomes[i]) + "\t" + str(positions[i]) + "\t" + identifiers[i] + "\t" + ref_allele[i] + "\t" + snps[ind][0] + "\t.\t.\t.\n")
        else: #Use major allele
            ind1 = sp.where(snps[0]==raw[:,i])[0][0]
            ind2 = sp.where(snps[1]==raw[:,i])[0][0]
            minor_index = sp.argmin(sp.array([ind1,ind2]))
            major_index = sp.argmax(sp.array([ind1,ind2]))
            out.write(str(chromosomes[i]) + "\t" + str(positions[i]) + "\t" + identifiers[i] + "\t" + snps[major_index][0] + "\t" + snps[minor_index][0] + "\t.\t.\t.\n")
    out.close()
Example #23
 def _setup(self):
     
     sessions = unique([unit.session for unit in self.units])
     self._sessions = sessions
     self._rawdata = dict.fromkeys(sessions)
     
     for session in sessions:
         
         datadir = expanduser(session.path)
         # We might not be looking at all the units in this session
         session_units = [ unit for unit in self.units if unit in session.units ]
         tetrodes = unique([unit.tetrode for unit in session_units])
         
         data = _load_data(datadir, tetrodes)
         
         # Get all the processed behavior data from a Rat object
         bdata = data['bhv']
         rat = bhv.Rat(session.rat)
         rat.update(session.date, bdata)
         trial_data = rat.sessions['trials'][0]
         
         # Keep only the behavior trials we have neural data for
         sync = data['syn'].map_n_to_b_masked
         trial_data = trial_data.ix[sync.data[~sync.mask]]
         
         # Get the onsets from the neural data
         samp_rate = 30000.0
         n_onsets = data['ons'][~sync.mask]/samp_rate
         trial_data['n_onset'] = n_onsets
         
         for tetrode in tetrodes:
         
             units = [ unit for unit in session_units if unit.tetrode == tetrode ]
             clusters = [ data['cls'][tetrode][unit.cluster] for unit in units ]
             # Using izip saves memory!
             packed = izip(units, clusters)
             
             # For each trial, we want to grab all the spikes between PG and FG,
             # plus 30 seconds on either side
             for unit, cluster in packed:
                 times = _get_times(cluster['peaks'], trial_data)
                 trial_data[unit.id] = times
             
         # Get rid of all trials that take too long, using a limit of 20 seconds
         delay_limit = 20
         delay = trial_data['C in'] - trial_data['PG in']
         trial_data = trial_data[delay < delay_limit]
     
         self._rawdata[session] = trial_data
Example #24
    def _do_outer_iteration_stage(self, inv_points):
        # Generate curve from points
        for inv_val in inv_points:
            # Apply one applied pressure and determine invaded pores
            logger.info('Applying capillary pressure: ' + str(inv_val))
            self._do_one_inner_iteration(inv_val)

        # Find invasion sequence values (to correspond with IP algorithm)
        self['pore.inv_seq'] = sp.searchsorted(sp.unique(self['pore.inv_Pc']),
                                               self['pore.inv_Pc'])
        self['throat.inv_seq'] = sp.searchsorted(sp.unique(self['throat.inv_Pc']),
                                                 self['throat.inv_Pc'])

        if self._trapping:
            self.evaluate_trapping(self['pore.outlets'])
Example #25
def set_axis_properties(p,metric,varying_parameter,group):

    #Set major x-axis label
    plt.xlabel(xlabel_names[varying_parameter])

    #Set x-axis scale
    xscale_args = xscale_arguments[(metric,varying_parameter)]
    plt.xscale(xscale_args[0],**xscale_args[1])

    #Set x-axis tick labels
    #Get tick values
    ticks = list(sp.unique(group[varying_parameter]))

    #If an item is not in the tick dictionary for the bar plot, add it
    if pltkind[(metric,varying_parameter)] == 'bar':
        for item in ticks:
            if item not in varying_xlabels[varying_parameter].keys():
                varying_xlabels[varying_parameter][item] = '$' + str(item) +'$'

    xlabels = [ varying_xlabels[varying_parameter][item] for item in ticks]

    if pltkind[(metric,varying_parameter)] == 'bar':
        p.set_xticks(sp.arange(len(ticks))+0.5)
        plt.setp(p.set_xticklabels(xlabels), rotation=0)
    else:
        plt.xticks(ticks,xlabels)

    plt.ylabel(ylabel_names[metric])
    plt.grid('on')
Example #26
def makeinputh5(Iono,basedir):
    """This will make a h5 file for the IonoContainer that can be used as starting
    points for the fitter. The ionocontainer taken will be average over the x and y dimensions
    of space to make an average value of the parameters for each altitude.
    Inputs
    Iono - An instance of the Ionocontainer class that will be averaged over so it can
    be used for fitter starting points.
    basdir - A string that holds the directory that the file will be saved to.
    """
    # Get the parameters from the original data
    Param_List = Iono.Param_List
    dataloc = Iono.Cart_Coords
    times = Iono.Time_Vector
    velocity = Iono.Velocity
    zlist,idx = sp.unique(dataloc[:,2],return_inverse=True)
    siz = list(Param_List.shape[1:])
    vsiz = list(velocity.shape[1:])

    datalocsave = sp.column_stack((sp.zeros_like(zlist),sp.zeros_like(zlist),zlist))
    outdata = sp.zeros([len(zlist)]+siz)
    outvel = sp.zeros([len(zlist)]+vsiz)
    #  Do the averaging across space
    for izn,iz in enumerate(zlist):
        arr = sp.argwhere(idx==izn)
        outdata[izn] = sp.mean(Param_List[arr],axis=0)
        outvel[izn] = sp.mean(velocity[arr],axis=0)

    Ionoout = IonoContainer(datalocsave,outdata,times,Iono.Sensor_loc,ver=0,
                            paramnames=Iono.Param_Names, species=Iono.Species,velocity=outvel)
    Ionoout.saveh5(basedir/'startdata.h5')
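The averaging loop relies on `return_inverse`: `idx` maps each sample back to its altitude bin, so `sp.argwhere(idx == izn)` collects one bin's members. The same pattern in miniature (toy altitudes and values):

import numpy as np

z = np.array([100.0, 200.0, 100.0, 200.0])      # altitude per sample
vals = np.array([1.0, 2.0, 3.0, 4.0])
zlist, idx = np.unique(z, return_inverse=True)  # zlist -> [100. 200.]
means = np.array([vals[idx == k].mean() for k in range(len(zlist))])
print(means)                                    # [2. 3.]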
Example #27
 def _do_outer_iteration_stage(self):
     #Generate curve from points
     for inv_val in self._inv_points:
         #Apply one applied pressure and determine invaded pores
         self._logger.info('Applying capillary pressure: '+str(inv_val))
         self._do_one_inner_iteration(inv_val)
     #Store results using networks' get/set method
     self.set_pore_data(prop='inv_Pc',data=self._p_inv)
     self.set_throat_data(prop='inv_Pc',data=self._t_inv)
     #Find invasion sequence values (to correspond with IP algorithm)
     self._p_seq = sp.searchsorted(sp.unique(self._p_inv),self._p_inv)
     self._t_seq = sp.searchsorted(sp.unique(self._t_inv),self._t_inv)
     self.set_pore_data(prop='inv_seq',data=self._p_seq)
     self.set_throat_data(prop='inv_seq',data=self._t_seq)
     #Remove temporary arrays and adjacency matrices
     del self._net.adjacency_matrix['csr']['invaded']
Example #28
    def _do_one_inner_iteration(self, inv_val):
        r"""
        Determine which throats are invaded at a given applied capillary
        pressure.

        """
        # Generate a tlist containing boolean values for throat state
        Tinvaded = self['throat.entry_pressure'] <= inv_val
        # Find all pores that can be invaded at specified pressure
        [pclusters, tclusters] = self._net.find_clusters2(mask=Tinvaded,
                                                          t_labels=True)
        if self._AL:
            # Identify clusters connected to invasion sites
            inv_clusters = sp.unique(pclusters[self['pore.inlets']])
        else:
            # All clusters are invasion sites
            inv_clusters = pclusters
        inv_clusters = inv_clusters[inv_clusters >= 0]
        # Find pores on the invading clusters
        pmask = np.in1d(pclusters, inv_clusters)
        # Store current applied pressure in newly invaded pores
        pinds = (self['pore.inv_Pc'] == sp.inf) * (pmask)
        self['pore.inv_Pc'][pinds] = inv_val
        # Find throats on the invading clusters
        tmask = np.in1d(tclusters, inv_clusters)
        # Store current applied pressure in newly invaded throats
        tinds = (self['throat.inv_Pc'] == sp.inf) * (tmask)
        self['throat.inv_Pc'][tinds] = inv_val
        # Store total network saturation
        tsat = sp.sum(self._net['throat.volume'][self['throat.inv_Pc'] <= inv_val])
        psat = sp.sum(self._net['pore.volume'][self['pore.inv_Pc'] <= inv_val])
        total = sp.sum(self._net['throat.volume']) + sp.sum(self._net['pore.volume'])
        self['pore.inv_sat'][pinds] = (tsat + psat)/total
        self['throat.inv_sat'][tinds] = (tsat + psat)/total
Example #29
def generate_set_partitions(set_):
    """Generate all of the partitions of a set.
    
    This is a helper function that utilizes the restricted growth strings from
    :py:func:`generate_set_partition_strings`. The partitions are returned in
    lexicographic order.
    
    Parameters
    ----------
    set_ : :py:class:`Array` or other Array-like, (`m`,)
        The set to find the partitions of.
    
    Returns
    -------
    partitions : list of lists of :py:class:`Array`
        The number of elements in the outer list is equal to the number of
        partitions, which is the `m`^th Bell number. Each of the inner lists
        corresponds to a single possible partition. The length of an inner list
        is therefore equal to the number of blocks. Each of the arrays in an
        inner list is hence a block.
    """
    set_ = scipy.asarray(set_)
    strings = generate_set_partition_strings(len(set_))
    partitions = []
    for string in strings:
        blocks = []
        for block_num in scipy.unique(string):
            blocks.append(set_[string == block_num])
        partitions.append(blocks)
    
    return partitions
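Each restricted-growth string assigns a block number to every element, so the blocks are recovered with one unique-then-mask pass, exactly as in the loop above. A toy run for a single string:

import numpy as np

set_ = np.array(['a', 'b', 'c'])
string = np.array([0, 1, 0])   # one restricted-growth string: blocks {a, c} and {b}
blocks = [set_[string == b] for b in np.unique(string)]
# blocks -> [array(['a', 'c'], ...), array(['b'], ...)]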
Example #30
def unique_rows(arr):
    """Returns a copy of arr with duplicate rows removed.
    
    From Stackoverflow "Find unique rows in numpy.array."
    
    Parameters
    ----------
    arr : :py:class:`Array`, (`m`, `n`)
        The array to find the unique rows of.
    
    Returns
    -------
    unique : :py:class:`Array`, (`p`, `n`) where `p` <= `m`
        The array `arr` with duplicate rows removed.
    """
    b = scipy.ascontiguousarray(arr).view(
        scipy.dtype((scipy.void, arr.dtype.itemsize * arr.shape[1]))
    )
    try:
        dum, idx = scipy.unique(b, return_index=True)
    except TypeError:
        # Handle bug in numpy 1.6.2:
        rows = [_Row(row) for row in b]
        srt_idx = sorted(range(len(rows)), key=rows.__getitem__)
        rows = scipy.asarray(rows)[srt_idx]
        row_cmp = [-1]
        for k in xrange(1, len(srt_idx)):
            row_cmp.append(rows[k-1].__cmp__(rows[k]))
        row_cmp = scipy.asarray(row_cmp)
        transition_idxs = scipy.where(row_cmp != 0)[0]
        idx = scipy.asarray(srt_idx)[transition_idxs]
    return arr[idx]
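The void-dtype view predates native row support in `unique`; on NumPy 1.13 and later the same result is available directly, which may be the simpler route when the old-numpy fallback is not needed:

import numpy as np

arr = np.array([[1, 2],
                [3, 4],
                [1, 2]])
print(np.unique(arr, axis=0))  # [[1 2]
                               #  [3 4]]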
Example #32
def intron_list_from_annotation(options):

    ### parse gff-file
    anno_dict = parse_anno_from_file(options)
    #intron_lists = sp.zeros((0, 4), dtype='str')
    intron_lists = []
    exon_map = dict()

    for g, gene in enumerate(anno_dict['genes']):
        if options.verbose and g > 0 and g % 100 == 0:
            print('.', end=' ')
            if g % 1000 == 0:
                print('%i/%i' % (g, len(anno_dict['genes'])))
        chrm = anno_dict['genes'][gene][0]
        strand = anno_dict['genes'][gene][1]
        if not chrm in exon_map:
            exon_map[chrm] = dict()
        for trans in anno_dict['gene2trans'][gene]:
            exons = []
            for exon in anno_dict['trans2exons'][trans]:
                exons.append(exon[1:])
            exons = sp.array(exons)
            s_idx = sp.argsort(exons[:, 0])
            exons = exons[s_idx, :]

            for e in range(exons.shape[0] - 1):
                ### intron_lists = [[chrm, strand, start, stop], [chrm, strand, start, stop], ...]
                #intron_lists = sp.r_[intron_lists, [[chrm, strand, str(exons[e, 1] + 1), str(exons[e + 1, 0])]]]
                intron_lists.append(
                    [chrm, strand,
                     str(exons[e, 1] + 1),
                     str(exons[e + 1, 0])])

            ### we assume that an exon cannot occur twice in the same transcript!
            ### the value in the dict is a binary encoding of whether the left/right end is intronic; 10 = 2 means the 5' end is intronic
            #if len(contig_list) == 1:
            #    exon_map[chrm][trans.id][contig_list[exon]] = 0 ### 00 -> should never occurr
            #elif exon == 0:
            #    exon_map[chrm][trans.id][contig_list[exon]] = 2 ### 10
            #elif exon == len(contig_list) - 1:
            #    exon_map[chrm][trans.id][contig_list[exon]] = 1 ### 01
            #else:
            #    exon_map[chrm][trans.id][contig_list[exon]] = 3 ### 11

    intron_lists = sp.array(intron_lists, dtype='str')
    ### make intron_list unique
    tmp, u_idx = sp.unique(row_strings(intron_lists), return_index=True)
    intron_lists = intron_lists[u_idx, :]

    return (intron_lists, exon_map)
Example #33
def changeData(shotlist, data, idx, name='shots2016'):

    tedata = loadTe()

    conn = sqlite3.connect(_dbname)
    
    unishots = scipy.unique(shotlist)

    for i in unishots:
        inp = shotlist == i
        gen(conn, tedata, data[inp], idx[inp], timelist[inp], int(i), name=name)


    conn.close()
Example #34
def snowball_round(G, seeds, myspace=False):
    """Function takes a base graph, and a list of seeds
    and builds out the network data by accessing the
    Google SocialGraph API."""
    t0 = time()
    if myspace:
        seeds = get_myspace_url(seeds)
    sb_data = []
    for s in range(0, len(seeds)):
        s_sg = get_sg(seeds[s])
        new_ego, pen = create_egonet(s_sg)  # Create ego net of seed
        # Compose new network data into old base graph
        for p in pen:
            sb_data.append(p)
        if s < 1:
            sb_net = nx.compose(G, new_ego)
        else:
            sb_net = nx.compose(new_ego, sb_net)
        del new_ego
        if s == round(len(seeds) * 0.2):
            # Simple progress output, useful for long jobs
            sb_net.name = '20% complete'
            nx.info(sb_net)
            print('AT: ' + strftime('%m/%d/%Y, %H:%M:%S', gmtime()))
            print('')
        if s == round(len(seeds) * 0.4):
            sb_net.name = '40% complete'
            nx.info(sb_net)
            print('AT: ' + strftime('%m/%d/%Y, %H:%M:%S', gmtime()))
            print('')
        if s == round(len(seeds) * 0.6):
            sb_net.name = '60% complete'
            nx.info(sb_net)
            print('AT: ' + strftime('%m/%d/%Y, %H:%M:%S', gmtime()))
            print('')
        if s == round(len(seeds) * 0.8):
            sb_net.name = '80% complete'
            nx.info(sb_net)
            print('AT: ' + strftime('%m/%d/%Y, %H:%M:%S', gmtime()))
            print('')
        if s == len(seeds) - 1:
            print('NEW NETWORK COMPLETE!')
            print('AT: ' + strftime('%m/%d/%Y, %H:%M:%S', gmtime()))
            sb_net.name = G.name + '--> '
    # Return newly discovered seeds
    sb_data = array(sb_data)
    sb_data = sb_data.flatten()
    sb_data = unique(sb_data)
    nx.info(sb_net)
    return sb_net, sb_data
Example #35
def plotTransformedData(transformed=None,labels=None,filename="exercise1.pdf"):
    pl.figure()
    ind_l = sp.unique(labels)
    legend = []
    for i,label in enumerate(ind_l):
        ind = sp.where(label==labels)[0]
        plot = pl.plot(transformed[ind,0],transformed[ind,1],'.',color=plot_color[i])
        legend.append(plot)
    pl.legend(ind_l,scatterpoints=1,numpoints=1,prop={'size':8},ncol=6,loc="upper right",fancybox=True)
    pl.xlabel("Transformed X Values")
    pl.ylabel("Transformed Y Values")
    pl.grid(True)
    #Save File
    pl.savefig(filename)
Example #36
        def fit(self, X, y):
            self._fit_X = X
            self.classes_, self._fit_y = scipy.unique(y, return_inverse=True)

            if self.algorithm == "brute":
                pass
            elif self.algorithm == "kd_tree":
                self.tree = KDTree(X, leaf_size=self.leaf_size)
            elif self.algorithm == "ball_tree":
                self.tree = BallTree(X, leaf_size=self.leaf_size)
            else:
                raise ValueError("unrecognized algorithm: ",
                                 str(self.algorithm))
            return self
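Here `unique(y, return_inverse=True)` doubles as a label encoder: `classes_` receives the sorted class labels and `_fit_y` the integer-coded targets. In isolation:

import numpy as np

y = np.array(['cat', 'dog', 'cat', 'bird'])
classes, y_encoded = np.unique(y, return_inverse=True)
print(classes)    # ['bird' 'cat' 'dog']
print(y_encoded)  # [1 2 1 0]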
Example #37
def entropy(values):
    """A slow way to calculate the entropy of the input values"""

    values = values.flatten()
    #calculate the probability of a value in a vector
    vUni = sp.unique(values)
    vlen = len(vUni)
    lenval = float(len(values))

    FreqData = sp.zeros(len(vUni))  # float array, so the probabilities are not truncated
    for i in range(len(vUni)):
        FreqData[i] = sum(values == vUni[i]) / lenval

    return -sum(f * math.log(f, 2) for f in FreqData)
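With `return_counts` (available since NumPy 1.9) the frequency loop can be vectorized away entirely; a sketch of the same entropy:

import numpy as np

values = np.array([0, 0, 1, 2])
_, counts = np.unique(values, return_counts=True)
p = counts / counts.sum()
print(-(p * np.log2(p)).sum())  # 1.5 bits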
Example #38
    def get_drainage_data(self):
        r'''
        '''
        if hasattr(self, '_iminv') is False:
            raise Exception('The \'run\' method has not been called yet')
        pts = sp.unique(self._iminv)[1:]
        vol = []
        for r in pts:
            vol.append(sp.sum(self._iminv >= r))
        vals = namedtuple('DrainageCurve', ('size', 'volume'))
        vals.size = pts
        vals.volume = sp.array(vol)

        return vals
Example #39
def run3(shotlist, timelist, idx, name='shots2016', serial=True):
    conn = sqlite3.connect(_dbname2)

    unishots = scipy.unique(shotlist)
    te = loadTe()
    rzl = genInp()

    if serial:
        for i in unishots:
            print(i)
            inp = shotlist == i
            results = weights2(rzl, te, i, timelist[inp])

            writeData(idx[inp], results, conn, name)

        conn.close()
    else:

        index = 0
        lim = len(unishots)
        while index < lim:
            num = 35
            if lim - index < num:
                num = lim - index
                print(num)

            pool = multiprocessing.Pool(num)
            output = {}
            indexout = {}
            for i in range(num):
                inp = shotlist == unishots[index]
                if index < lim:
                    indexout[i] = idx[inp]
                    output[i] = pool.apply_async(
                        weights2, (rzl, te, unishots[index], timelist[inp]))
                index += 1

            pool.close()
            pool.join()
            results = scipy.array(
                [output[i].get() for i in output]
            )  # break down the shot chunks and write the data to the sql database

            #return indexout, results, output
            print('   ')
            print('writing to shot: ' + str(inp))
            for i in range(len(results)):
                writeData(indexout[i], results[i], conn, name)

        conn.close()
Example #40
 def test_one_value_one_source(self):
     rt = op.algorithms.ReactiveTransport(network=self.net,
                                          phase=self.phase)
     rt.setup(r_tolerance=0.001, max_iter=5000,
              relaxation_source=1, relaxation_quantity=1)
     rt.settings.update({'conductance': 'throat.diffusive_conductance',
                         'quantity': 'pore.concentration'})
     rt.set_source(pores=self.net.pores('bottom'), propname='pore.reaction')
     rt.set_value_BC(pores=self.net.pores('top'), values=1.0)
     rt.run()
     x = [0.0011, 0.1260, 0.2508, 0.3757,
          0.5006, 0.6254, 0.7503, 0.8751, 1.0]
     y = sp.unique(sp.around(rt['pore.concentration'], decimals=4))
     assert sp.all(x == y)
Example #41
0
def load_events(options, event_info):

    event_list = []
    for event_type in sp.unique(event_info[:, 0]):
        (events, _) = cPickle.load(
            open(
                os.path.join(
                    options.outdir, 'merge_graphs_%s_C%s.pickle' %
                    (event_type, options.confidence)), 'rb'))
        s_idx = sp.where(event_info[:, 0] == event_type)[0]
        for e in s_idx:
            event_list.append(events[int(event_info[e, 1])])

    return event_list
Example #42
0
 def test_largest_sphere_multiple_geometries(self):
     net = OpenPNM.Network.Cubic(shape=[10, 10, 10], spacing=[5, 5, 5])
     net['pore.coords'][net.pores('top')] += [0, 0, -3]
     geom2 = OpenPNM.Geometry.GenericGeometry(network=net,
                                              pores=net.pores('top'))
     geom2['pore.diameter'] = 1.0
     Ps = net.pores('top', mode='not')
     geom1 = OpenPNM.Geometry.GenericGeometry(network=net,
                                              pores=Ps,
                                              throats=net.Ts)
     mod = OpenPNM.Geometry.models.pore_diameter.largest_sphere
     geom1.models.add(propname='pore.diameter', model=mod, iters=15)
     assert sp.all(geom2['pore.diameter'] == 1.0)
     assert sp.all(sp.ceil(sp.unique(geom1['pore.diameter'])) == [3.0, 5.0])
Example #43
0
def patch_color_labels(s, freq=[1], cmap='Paired', shuffle=True):
    ''' color by freq of labels '''
    s.vColor = sp.zeros(s.vertices.shape)
    _, labels = sp.unique(s.labels, return_inverse=True)
    labels += 1
    colr = get_cmap(sp.amax(labels) + 1, cmap=cmap)
    s.vColor = s.vColor + 1
    perm1 = sp.mod(3511 * sp.arange(sp.amax(labels) + 1), sp.amax(labels) + 1)
    freq = sp.reshape(freq, (len(freq), 1))
    if shuffle:
        s.vColor = (1 - freq) + freq * sp.array(colr(perm1[labels])[:, :3])
    else:
        s.vColor = (1 - freq) + freq * sp.array(colr(labels)[:, :3])
    return s
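A note on the shuffle above: multiplying by 3511 modulo n only permutes the labels when gcd(3511, n) == 1 (3511 is prime, so this holds unless n is a multiple of it). A quick check with an illustrative n, assuming scipy is imported as sp:

n = 12
perm1 = sp.mod(3511 * sp.arange(n), n)
print(len(sp.unique(perm1)) == n)   # True, since gcd(3511, 12) == 1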
Example #44
0
 def _compress_labels(self, label_array):
     # Make cluster number contiguous
     array = sp.array(label_array)
     if array.dtype != int:
         raise Exception('label_array must be integers')
     min_val = sp.amin(array)
     if min_val >= 0:
         min_val = 0
     array = array + sp.absolute(min_val)
     nums = sp.unique(array)
     temp = sp.zeros((sp.amax(array)+1,))
     temp[nums] = sp.arange(0, sp.size(nums))
     array = temp[array].astype(array.dtype)
     return array
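A worked example of the lookup-table trick above (illustrative values): labels [-2, 4, 4, 7] compress to [0, 1, 1, 2]:

arr = sp.array([-2, 4, 4, 7])
arr = arr + sp.absolute(arr.min())   # offset -> [0, 6, 6, 9]
nums = sp.unique(arr)                # [0, 6, 9]
temp = sp.zeros((arr.max() + 1,))
temp[nums] = sp.arange(0, sp.size(nums))
print(temp[arr].astype(int))         # [0 1 1 2]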
Example #45
0
def to_continuous_mapping(hyperedges, included=None):
    """
    Convert ids to continuous ids

    Parameters
    ----------
    hyperedges: list of lists
        list of lists containing hyperedges
    included: array-like, optional
        if given, only ids that also appear in the corresponding column of
        ``included`` are mapped; otherwise all ids are taken into account

    Returns
    -------
    mappings_id_to_index: list of dictionaries
        one {id: index, } dictionary per partite set; indices are assigned
        based on the sorted order of the ids in each set
    mappings_index_to_id: list of dictionaries
        one {index: id, } dictionary per partite set
    mappings_range: tuple
        (start_index, end_index)
    """

    #extract unique ids from hyperedge list
    if len(sp.array(hyperedges).shape) == 2:  #uniform hypergraph
        keys_list = [
            sp.unique(sp.array(hyperedges)[:, i])
            for i in range(sp.array(hyperedges).shape[1])
        ]
    else:  # non-uniform hypergraph
        keys_list = []
        for he in hyperedges:
            for ind, n in enumerate(he):
                if ind > len(keys_list) - 1:
                    keys_list.append(set())
                keys_list[ind].add(n)
        keys_list = list(map(list, keys_list))  # materialize for len() under Python 3

    mappings_index_to_id = []
    mappings_id_to_index = []
    last_index = 0
    for i in range(len(keys_list)):
        l = keys_list[i]
        if included is not None:
            l = [el for el in l if el in included[:, i]]
        mappings_index_to_id.append(
            dict(zip(range(last_index, last_index + len(l)), sorted(l))))
        mappings_id_to_index.append(
            dict(zip(sorted(l), range(last_index, last_index + len(l)))))
        last_index += len(l)

    return (mappings_id_to_index, mappings_index_to_id, (0, last_index - 1))
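A hedged usage sketch (toy hyperedges, not from the source; assumes scipy is imported as sp) for a 2-uniform hypergraph:

hyperedges = [[10, 5], [20, 7], [10, 7]]
id2idx, idx2id, idx_range = to_continuous_mapping(hyperedges)
print(id2idx)      # [{10: 0, 20: 1}, {5: 2, 7: 3}]
print(idx_range)   # (0, 3)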
Example #46
0
def plot_scores(data,
                xcol,
                ycol,
                title="Evaluation scores",
                save_as=None,
                folder_name='figures',
                ax=None,
                legend='brief'):
    """
    Function to plot all the scores from different batch correction methods
    data: pd dataframe with all the scores
    xcol: name of the column to plot on the xaxis
    ycol: name of the column to plot on the yaxis
    title: plot title
    """
    if len(sp.unique(data['sample'])) > 1:
        score_plot = sns.scatterplot(x=xcol,
                                     y=ycol,
                                     data=data,
                                     hue='method',
                                     style='sample',
                                     legend=legend,
                                     ax=ax)
    else:
        score_plot = sns.scatterplot(x=xcol,
                                     y=ycol,
                                     data=data,
                                     hue='method',
                                     legend=legend,
                                     ax=ax)
    # correct labels
    xlab = score_plot.get_xlabel()
    xlab = xlab.replace('_', ' ')
    ylab = score_plot.get_ylabel()
    ylab = ylab.replace('_', ' ')
    score_plot.set(xlabel=xlab, ylabel=ylab, title=title)
    # move the legend outside the plot
    if legend is not False:
        handles, names = score_plot.get_legend_handles_labels()
        score_plot.legend(handles,
                          names,
                          bbox_to_anchor=(1.05, 1),
                          loc=2,
                          borderaxespad=0.)
    if save_as is not None:
        plt.savefig(os.path.join(folder_name, save_as), bbox_inches='tight')
        plt.close()
    else:
        return score_plot
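A hedged usage sketch (synthetic scores; the metric names and column values are made up, and pandas/seaborn/matplotlib are assumed to be imported as in the source):

import pandas as pd
scores = pd.DataFrame({
    'mixing_score': [0.8, 0.6, 0.7, 0.5],
    'silhouette':   [0.4, 0.5, 0.3, 0.6],
    'method': ['harmony', 'scanorama', 'harmony', 'scanorama'],
    'sample': ['A', 'A', 'B', 'B'],
})
plot_scores(scores, xcol='mixing_score', ycol='silhouette',
            title='Toy scores', save_as=None)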
Example #47
0
def fit_dispersion(counts, disp_raw, disp_conv, sf, CFG, dmatrix1):

    mean_count = sp.mean(counts / sf, axis=1)[:, sp.newaxis]
    index = sp.where(disp_conv)[0]

    lowerBound = sp.percentile(sp.unique(disp_raw[index]), 1)
    upperBound = sp.percentile(sp.unique(disp_raw[index]), 99)

    idx = sp.where((disp_raw > lowerBound) & (disp_raw < upperBound))[0]

    matrix = sp.ones((idx.shape[0], 2), dtype='float')
    matrix[:, 0] /= mean_count[idx].ravel()

    modGamma = sm.GLM(disp_raw[idx],
                      matrix,
                      family=sm.families.Gamma(sm.families.links.identity))
    res = modGamma.fit()
    Lambda = res.params

    disp_fitted = disp_raw.copy()
    ok_idx = sp.where(~sp.isnan(disp_fitted))[0]
    disp_fitted[ok_idx] = Lambda[0] / mean_count[ok_idx] + Lambda[1]

    if sp.sum(disp_fitted > 0) > 0:
        print "Found dispersion fit"

    if CFG['diagnose_plots']:
        plot.mean_variance_plot(counts=counts,
                                disp=disp_fitted,
                                matrix=dmatrix1,
                                figtitle='Fitted Dispersion Estimate',
                                filename=os.path.join(CFG['plot_dir'],
                                                      'dispersion_fitted.pdf'),
                                CFG=CFG)

    return (disp_fitted, Lambda, idx)
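The Gamma GLM above fits the hyperbolic relation disp(mu) = Lambda[0] / mu + Lambda[1]. A minimal sketch of the same functional form, using ordinary least squares on synthetic data instead of statsmodels (illustrative only):

import numpy as np
mu = np.linspace(1, 100, 200)
disp = 2.0 / mu + 0.1 + np.random.normal(0, 0.01, mu.size)   # true Lambda = (2.0, 0.1)
X = np.column_stack([1.0 / mu, np.ones_like(mu)])
Lambda = np.linalg.lstsq(X, disp, rcond=None)[0]
print(Lambda)   # approximately [2.0, 0.1]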
def measure_labelling(im, labelling, lis):
    """Given a one-channel image, a labelling and a map of labels onto logical indices, returns an array
    with the median value of the part of ``im`` labelled ``l`` stored at location ``lis[l]``."""
    labels = sp.unique(labelling)
    labels = labels[labels != -1]

    measurements = sp.ndimage.labeled_comprehension(im, labelling, labels,
                                                    sp.median, float, sp.nan)

    results = sp.zeros((64, 192))
    results[:, :] = sp.nan
    restricted_lis = lis[:, labels].astype(int)
    results[restricted_lis[0], restricted_lis[1]] = measurements

    return results
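A tiny demonstration of scipy.ndimage.labeled_comprehension, the core call in measure_labelling (arrays here are illustrative):

import numpy as np
from scipy import ndimage

im = np.array([[1.0, 2.0, 3.0],
               [4.0, 5.0, 6.0]])
labelling = np.array([[0, 0, 1],
                      [1, 1, -1]])   # -1 marks unlabelled pixels
labels = np.unique(labelling)
labels = labels[labels != -1]
medians = ndimage.labeled_comprehension(im, labelling, labels,
                                        np.median, float, np.nan)
print(medians)   # [1.5 4.0] -> median over each labelled region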
Example #49
0
 def _make_seeds(self, sizes):
     imresults = sp.zeros(sp.shape(self.image))
     print('Making seed array')
     print('0%|' + '-' * len(sizes) + '|100%')
     print('  |', end='')
     for r in sizes:
         print('.', end='')
         sys.stdout.flush()
         imseed = self._imdt > r
         # Trim clusters not connected in invading face(s)
         imlabels = spim.label(imseed)[0]
         inlets = []
         if sp.shape(self.image)[0] > 1:
             inlets.extend(sp.unique(imlabels[[0, -1], :, :]))
         if sp.shape(self.image)[1] > 1:
             inlets.extend(sp.unique(imlabels[:, [0, -1], :]))
         if sp.shape(self.image)[2] > 1:
             inlets.extend(sp.unique(imlabels[:, :, [0, -1]]))
         inlets = sp.unique(inlets)[1:]
         imseed = sp.in1d(imlabels, inlets)
         imseed = sp.reshape(imseed, sp.shape(self.image))
         imresults[(imresults == 0) * (imseed)] = r
     print('|')
     self._imseeds = imresults
def run(shotlist, timelist, idx, name='shots2016'):
    conn = sqlite3.connect(_dbname)

    unishots = scipy.unique(shotlist)
    te = loadTe()
    rzl = genInp()

    for i in unishots:
        print(i)
        inp = shotlist == i
        results = weights(rzl, te, i, timelist[inp])

        writeData(idx[inp], results, conn, name)

    conn.close()
Example #51
0
    def _get_throat_normal(self, verts):
        r"""
        With a Delaunay Tesselation the throat normal is usually the vector connecting neighbouring pores as the throat is defined
        by the plane which is equidistant from the 2 pores. However, if scaling of pore coordinates and vertices is introduced 
        this will alter the normals so they must be recalculated.
        The routine is passed a list of shared vertices which define the throat.
        The normal is calculated by performing a convex hull algorithm to return the vertices in hull order. A coordinate must be lost to
        do this as order only works in 2D.
        Then the vectors of 3 neighbouring points are worked out and the cross product is taken to define the normal
        """
        "verts may already be coplanar so check an take the other coords"
        if len(sp.unique(verts[:, 0])) == 1:
            verts_2d = np.vstack((verts[:, 1], verts[:, 2])).T
        elif len(sp.unique(verts[:, 1])) == 1:
            verts_2d = np.vstack((verts[:, 0], verts[:, 2])).T
        else:
            verts_2d = np.vstack((verts[:, 0], verts[:, 1])).T
        hull = ConvexHull(verts_2d)
        sorted_verts = verts[hull.vertices]
        v1 = sorted_verts[1] - sorted_verts[0]
        v2 = sorted_verts[-1] - sorted_verts[0]
        normal = sp.cross(v1, v2)

        return normal
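A minimal standalone sketch of the same idea (inputs are made up): order coplanar 3D vertices with a 2D convex hull, then cross two edge vectors to get the plane normal:

import numpy as np
from scipy.spatial import ConvexHull

verts = np.array([[0.0, 0.0, 1.0],
                  [1.0, 0.0, 1.0],
                  [1.0, 1.0, 1.0],
                  [0.0, 1.0, 1.0]])   # a unit square in the z = 1 plane
hull = ConvexHull(verts[:, :2])       # drop z so the 2D hull orders the vertices
sorted_verts = verts[hull.vertices]
v1 = sorted_verts[1] - sorted_verts[0]
v2 = sorted_verts[-1] - sorted_verts[0]
normal = np.cross(v1, v2)
print(normal)                         # [0. 0. 1.] up to sign and magnitude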
Example #52
0
def check_fits(data_path):

    hdulist = pyfits.open(data_path, memmap=False)
    print hdulist

    mheader = hdulist[1].header

    for key in mheader.keys():
        print key, '\t', mheader[key]
    #print mheader['STT_IMJD']
    #print mheader['STT_SMJD']
    #print mheader['STT_OFFS']

    mheader = hdulist[0].header

    for key in mheader.keys():
        print key, '\t', mheader[key]
    #print mheader['STT_IMJD']
    #print mheader['STT_SMJD']
    #print mheader['STT_OFFS']

    for k in range(1, 2):
        tbdata = hdulist[k].data

        fieldlabel = []
        for i in range(hdulist[k].header['TFIELDS']): 
            fieldlabel.append(hdulist[k].header['TTYPE%d'%(i+1)])
        print fieldlabel
        #for i in range(len(tbdata)):
        #    print tbdata[i][fieldlabel[k]]
        for i in range(hdulist[k].header['TFIELDS']):
            print hdulist[k].header['TTYPE%d'%(i+1)]
            print sp.unique(tbdata.field(fieldlabel[i])).shape
            print tbdata.field(fieldlabel[i]).shape
            print tbdata.field(fieldlabel[i])
            print
 def plot_drainage_curve(self,
                         pore_volume='volume',
                         throat_volume='volume',
                         pore_label='all',
                         throat_label='all',
                         fig=None):
     r"""
     Plot drainage capillary pressure curve
     """
     try:
         PcPoints = sp.unique(self['pore.inv_Pc'])
     except KeyError:
         raise Exception('Cannot plot drainage curve: ordinary percolation '
                         'simulation has not been run')
     pores = self._net.pores(labels=pore_label)
     throats = self._net.throats(labels=throat_label)
     Snwp_t = sp.zeros_like(PcPoints)
     Snwp_p = sp.zeros_like(PcPoints)
     Snwp_all = sp.zeros_like(PcPoints)
     Pvol = self._net['pore.' + pore_volume]
     Tvol = self._net['throat.' + throat_volume]
     Pvol_tot = sp.sum(Pvol)
     Tvol_tot = sp.sum(Tvol)
     vol_tot = Pvol_tot + Tvol_tot
     for i in range(0, sp.size(PcPoints)):
         Pc = PcPoints[i]
         Snwp_p[i] = sp.sum(
             Pvol[self['pore.inv_Pc'][pores] <= Pc]) / vol_tot
         Snwp_t[i] = sp.sum(
             Tvol[self['throat.inv_Pc'][throats] <= Pc]) / vol_tot
         Snwp_all[i] = (
             sp.sum(Tvol[self['throat.inv_Pc'][throats] <= Pc]) +
             sp.sum(Pvol[self['pore.inv_Pc'][pores] <= Pc])) / vol_tot
     if sp.mean(self._inv_phase['pore.contact_angle']) < 90:
         Snwp_p = 1 - Snwp_p
         Snwp_t = 1 - Snwp_t
         Snwp_all = 1 - Snwp_all
         PcPoints *= -1
     if fig is None:
         fig = plt.figure()
     plt.plot(PcPoints, Snwp_all, 'g.-')
     plt.plot(PcPoints, Snwp_p, 'r.-')
     plt.plot(PcPoints, Snwp_t, 'b.-')
     r"""
     TODO: Add legend to distinguish the pore and throat curves
     """
     return fig
Example #54
0
 def _remove_disconnected_clusters(self):
     bad_pores = sp.array([],dtype=int)
     self._pore_map = self.pores()
     self._throat_map = self.throats()
     health = self.check_network_health()
     if health['disconnected_clusters'] == []:
         self._throat_map = self.throats()
         self._pore_map = self.pores()
     else:
         Np = self.num_pores()
         Nt = self.num_throats()
         cluster_sizes = [sp.shape(x)[0] for x in health['disconnected_clusters']]
         acceptable_size = min([min([50, Np/2]), max(cluster_sizes)])  # 50 or less, if it's a really small network
         # Step through each cluster of pores; if it's a small cluster, add it to the list
         for cluster in health['disconnected_clusters']:
             if sp.shape(cluster)[0] < acceptable_size:
                 bad_pores = sp.append(bad_pores,sp.ravel(cluster))
         bad_throats = sp.unique(self.find_neighbor_throats(bad_pores))
         #Create map for pores
         if sp.shape(bad_pores)[0] > 0:
             i = 0
             self._pore_map = sp.zeros((Np-sp.shape(bad_pores)[0],),dtype=int)
             for pore in self.pores():
                 if pore not in bad_pores:
                     self._pore_map[i] = pore
                     i += 1
         #Create map for throats
         if sp.shape(bad_throats)[0] > 0:
             i = 0
             self._throat_map = sp.zeros((Nt-sp.shape(bad_throats)[0],),dtype=int)
             for throat in self.throats():
                 if throat not in bad_throats:
                     self._throat_map[i] = throat
                     i += 1
         #Fix the pore transformer
         try:
             if sp.shape(bad_pores)[0] > 0:
                 i = 0
                 old_transform = self._dictionary['pname_transform']
                 self._dictionary['pname_transform'] = sp.zeros((Np-sp.shape(bad_pores)[0],),dtype=int)
                 for pore in self.pores():
                     if pore not in bad_pores:
                         self._dictionary['pname_transform'][i] = old_transform[pore]
                         i += 1
         except KeyError:
             logger.info('Could not update pname_transform. Imported network may not have had it.')
         self.trim(pores=bad_pores)
Example #55
0
def reduced_rank_LDA(XTrain, yTrain, XTest, yTest):
    K = len(sp.unique(yTrain))
    N = XTrain.shape[0]
    p = XTrain.shape[1]

    PiK = sp.zeros((K, 1))
    M = sp.zeros((K, p))
    ScatterMatrix = []
    for ci in range(1, K+1):
        inds = sp.nonzero(yTrain == ci)
        Nci = len(inds[0])
        PiK[ci-1] = float(Nci) / N  # avoid integer division under Python 2
        #print XTrain[inds, :]
        M[ci-1, :] = sp.mean(XTrain[inds[0], :], 0)
    print PiK
    print M
def entropy(values):
    """A slow way to calculate the entropy of the input values"""

    values = sp.asarray(values).flatten()
    #calculate the probablility of a value in a vector
    vUni = sp.unique(values)
    lenval = float(values.size)

    FreqData = sp.zeros(vUni.shape, dtype=float)
    for i in xrange(FreqData.size):
        FreqData[i] = sum(values == vUni[i]) / lenval

    return -sp.sum(FreqData * sp.log2(FreqData))
Example #57
0
def pred_accuracy(y_true, y_pred):
    y_true = sp.copy(y_true)
    if len(sp.unique(y_true)) == 2:
        print 'dichotomous trait, calculating AUC'
        y_min = y_true.min()
        y_max = y_true.max()
        if y_min != 0 or y_max != 1:
            y_true[y_true == y_min] = 0
            y_true[y_true == y_max] = 1
        fpr, tpr, thresholds = metrics.roc_curve(y_true, y_pred)
        auc = metrics.auc(fpr, tpr)
        return auc
    else:
        print 'continuous trait, calculating COR'
        cor = sp.corrcoef(y_true, y_pred)[0, 1]
        return cor
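A hedged usage sketch (toy arrays) for the AUC branch, assuming sklearn.metrics is imported as metrics and scipy as sp, as in the source:

y_true = sp.array([0, 0, 1, 1])
y_pred = sp.array([0.1, 0.4, 0.35, 0.8])
print pred_accuracy(y_true, y_pred)   # prints the AUC, 0.75 for this example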
Example #58
0
 def test_gets_records(self):
     for scan_ind in range(2):
         for IF_ind in range(2):
             inds = self.FileProcessor.get_scan_IF_inds(scan_ind, IF_ind)
             IFs = self.IFs_all[inds]
             scans = self.scans_all[inds]
             # Verify we got all of them.
             self.assertEqual(sp.size(inds), npol * ncal * ntimes_scan)
             # Verify they are all unique.
             self.assertEqual(len(sp.unique(inds)),
                              npol * ncal * ntimes_scan)
             # Check that they are all right.
             wrong_scan = sp.where(sp.not_equal(scans, scan_set[scan_ind]))
             wrong_IF = sp.where(sp.not_equal(IFs, IF_set[IF_ind]))
             self.assertEqual(len(wrong_scan[0]), 0)
             self.assertEqual(len(wrong_IF[0]), 0)
Example #59
0
def local_thickness(im):
    # skimage's structuring elements are spelled ball/disk
    from skimage.morphology import ball, disk
    dt = spim.distance_transform_edt(im)
    sizes = sp.unique(sp.around(dt, decimals=0))
    im_new = sp.zeros_like(im, dtype=float)
    for r in tqdm(sizes):
        im_temp = dt >= r
        im_temp = spim.distance_transform_edt(~im_temp) <= r
        im_new[im_temp] = r
    # Trim outer edge of features to remove noise
    if im.ndim == 3:
        im_new = spim.binary_erosion(input=im, structure=ball(1))*im_new
    if im.ndim == 2:
        im_new = spim.binary_erosion(input=im, structure=disk(1))*im_new
    return im_new
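A hedged usage sketch (random input; assumes scipy as sp, scipy.ndimage as spim and tqdm are imported as in the source):

import numpy as np
im = np.random.rand(50, 50) > 0.3   # random binary image, True = foreground
thickness = local_thickness(im)
print(thickness.max())              # radius of the largest inscribed disk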
Example #60
0
    def _parse_points(self, shape, points, num_points):
        # Deal with input arguments
        if points is None:
            if num_points is None:
                raise Exception('Must specify either "points" or "num_points"')
            points = topotools.generate_base_points(num_points=num_points,
                                                    domain_size=shape,
                                                    reflect=True)
        else:
            # Should we check to ensure that points are reflected?
            points = sp.array(points)

        # Deal with points that are only 2D...they break Delaunay
        if points.shape[1] == 3 and len(sp.unique(points[:, 2])) == 1:
            points = points[:, :2]

        return points