Example #1
 def __init__(self, a, b):
     self.a = a
     self.b = b
     self.dir = None
     self.setDir()
     subtrArray = sp.dstack([self.a, -self.b]).sum(2)
     self.length = sp.sqrt(sp.sum([n**2 for n in subtrArray]))
Example #2
def jplot2d(*args):
    arg = ' -2D -l SOUTH '
    i = 0
    while (i < len(args)):
        i_arg = 0
        q = False
        arg_txt = True
        while ((len(args) > (i_arg + i + 1)) and arg_txt):
            if (type(args[i + i_arg + 1]) is str):
                i_arg += 1
                arg = arg + ' ' + args[i + i_arg]
            else:
                arg_txt = False

        name = '.' + str(scipy.stats.random_integers(10000)) + '.plot.tmp'

        if (len(args[i]) == scipy.size(args[i])):
            a = scipy.dstack((scipy.arange(1, scipy.size(args[i]) + 1), args[i]))[0]
            scipy.io.write_array(name, a)
        else:
            scipy.io.write_array(name, args[i])

        i += i_arg + 1
        arg = arg + ' ' + name

    os.system('java -cp jmathplot.jar org.math.plot.PlotPanel ' + arg)
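A hedged usage sketch for the function above: it writes each array to a temp file via the legacy scipy.io.write_array API and shells out to jMathPlot, so both must be available; the arrays and label strings here are purely illustrative.

import scipy
y = scipy.array([1.0, 4.0, 9.0, 16.0])
# Each positional array may be followed by option strings, which are passed
# through to the Java plotter verbatim.
jplot2d(y, 'legend first')
jplot2d(y, 'legend first', y * 2)   # two curves; the options attach to the first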
Example #3
def maskplot(index, data, mask, color='black', styles=['solid', 'dotted']):
    """ Creates a plot with masked-out intervals in dotted or custom line style,
    non-masked segments in dashed or custom line style, all in given color.
    Built from the example at
    http://matplotlib.org/examples/pylab_examples/multicolored_line.html
    """
    points = sp.array([index, data]).T.reshape(-1, 1, 2)
    segments = sp.concatenate([points[:-1], points[1:]], axis=1)
    mask = mask.reshape(-1, 1)
    mask = (sp.ones_like(mask) - mask).astype(bool)
    mask = sp.hstack((mask, mask))
    mask = sp.dstack((mask, mask))
    mask = mask[1:]
    #print segments.shape, mask.shape
    mask1 = sp.ma.masked_where(mask, segments)
    mask2 = sp.ma.masked_where(
        (-mask.astype(float)+1.).astype(bool), segments)

    ls1 = LineCollection(
        mask1,
        colors=color,
        linestyles=styles[0])

    ls2 = LineCollection(
        mask2,
        colors=colorConverter.to_rgba(color),
        linestyles=styles[1])

    return ls1, ls2
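A minimal driver sketch, assuming the sp/LineCollection/colorConverter imports already used inside maskplot; the caller is responsible for adding the returned collections to an Axes. The data and mask below are illustrative only.

import scipy as sp
import matplotlib.pyplot as plt

index = sp.linspace(0, 10, 100)
data = sp.sin(index)
mask = (data > 0.5).astype(int)   # illustrative mask: hide the peaks

ls1, ls2 = maskplot(index, data, mask)
fig, ax = plt.subplots()
ax.add_collection(ls1)            # styles[0] ('solid') collection
ax.add_collection(ls2)            # styles[1] ('dotted') collection
ax.set_xlim(index.min(), index.max())
ax.set_ylim(data.min(), data.max())
plt.show()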
Example #4
File: Capture.py Project: jlettvin/RPN
    def process(self, sbmp):
        """Core function"""
        if sbmp == None:
            # Oversize required
            return self.size
        # reverse height and width under advice
        (ws, hs, ps)    = shape = (sbmp.GetHeight(), sbmp.GetWidth(), 3)
        simg            = wx.ImageFromBitmap(sbmp)

        sarray          = scipy.array(scipy.fromstring(simg.GetData(), 'uint8'), self.dtype) / 255.0
        sarray          = scipy.rollaxis(scipy.reshape(sarray, shape), 2)
        self.shape      = sarray.shape

        tarray          = (self.withGPU if self.gpgpu else self.withCPU)(sarray)
        mm              = (sarray.min(), sarray.max(), tarray.min(), tarray.max())
        #print '\t', sarray.shape, tarray.shape,
        print type(sarray[0,0,0]), type(tarray[0,0,0]), mm

        tarray          = numpy.nan_to_num(tarray)
        tarray         /= max(tarray.max(), self.coefficient)
        tarray          = scipy.array((tarray * 255.0).tolist(), 'uint8')

        tarray          = scipy.dstack(tarray)
        timg            = wx.EmptyImage(ws, hs)
        timg              .SetData(tarray.tostring())
        self.tbmp       = timg.ConvertToBitmap()
        return self.tbmp
Example #5
File: jplot.py Project: jtwala/cs4852
def jplot2d(*args):
    arg = ' -2D -l SOUTH '
    i = 0
    while (i < len(args)):
        i_arg = 0
        q = False
        arg_txt = True
        while ((len(args) > (i_arg + i + 1)) and arg_txt):
            if (type(args[i + i_arg + 1]) is str):
                i_arg += 1
                arg = arg + ' ' + args[i + i_arg]
            else:
                arg_txt = False

        name = '.' + str(scipy.stats.random_integers(10000)) + '.plot.tmp'

        if ((len(args[i]) == scipy.size(args[i]))):
            a = scipy.dstack(
                (scipy.arange(1,
                              scipy.size(args[i]) + 1), args[i]))[0]
            scipy.io.write_array(name, a)
        else:
            scipy.io.write_array(name, args[i])

        i += i_arg + 1
        arg = arg + ' ' + name

    os.system('java -cp jmathplot.jar org.math.plot.PlotPanel ' + arg)
Example #6
 def __init__(self, a, b):
     self.a = a
     self.b = b
     self.dir = None
     self.setDir()
     subtrArray = sp.dstack([self.a, -self.b]).sum(2)
     self.length = sp.sqrt(sp.sum([n**2 for n in subtrArray]))
Example #7
def square_lattice(L):
    '''
    Generates an array of points making up a square LxL lattice.
    
    :param L: the dimension of the square lattice.
    '''
    x = sp.linspace(0, L, L + 1)
    coord_arrays = sp.meshgrid(x, x)
    polygon = (sp.dstack(coord_arrays))
    polygon = sp.reshape(polygon, ((L + 1)**2, 2))
    return polygon
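For instance, L = 2 yields the (L + 1)**2 = 9 lattice points from (0, 0) to (2, 2) (assuming import scipy as sp, as the snippet does):

pts = square_lattice(2)
print(pts.shape)   # (9, 2)
print(pts[:3])     # [[0. 0.] [1. 0.] [2. 0.]] -- row order follows meshgrid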
Example #8
    def __init__(self, train, T=10.0, grid_size=0.1):
        """Constructor for PredPol object.

        Args:
            train: a pandas DataFrame where each row is an observed crime, used
            to learn the model parameters. Must contain at least the following
            columns:
                t: numeric, indicating the time of the crime. All values must be
                less than the `T` supplied as an argument to this constructor.
                x: numeric, indicating the horizontal position of the crime.
                y: numeric, indicating the vertical position of the crime.
            For numerical safety (avoiding overflows), it's recommended that t,
            x, and y are all normalized to have relatively small values.

            T: float, indicating the maximum `t` value in the _combined_ train
            and test set of data.

            grid_size: float, indicating the level of discretization/total
            number of grid cells in the PredPol model. Smaller grid sizes have
            more grid cells.
        Returns: an initialized PredPol object before fitting parameter values.
        """
        self.train = train
        self.T = T
        self.height = train.y.max()
        self.width = train.x.max()
        self.grid_size = grid_size

        # Generate the center of each grid cell
        x_vals = sp.arange(0, self.width, self.grid_size)
        y_vals = sp.arange(0, self.height, self.grid_size)
        diff = round(self.grid_size / 2,
                     -int(math.floor(math.log10(self.grid_size))) + 1)
        x_vals += diff
        y_vals += diff

        xs, ys = sp.meshgrid(x_vals, y_vals)
        # print(xs)
        # print(ys)
        self.grid_cells = sp.vstack(sp.dstack((xs, ys)))
        self.grid_cells = pd.DataFrame(self.grid_cells, columns=['x', 'y'])

        self.interpolator = RegularGridInterpolator(
            points=(x_vals, y_vals),
            values=sp.arange(1,
                             len(self.grid_cells) + 1).reshape(
                                 (len(y_vals), len(x_vals))).transpose(),
            method='nearest',
            bounds_error=False,
            fill_value=None)

        self.eta = None
        self.omega = None
        self.sigma = None
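The grid-center construction above can be checked in isolation. A small sketch of the meshgrid/dstack/vstack idiom with illustrative values (the rounded half-cell offset is simplified to a plain + 0.25 here):

import scipy as sp
x_vals = sp.arange(0, 1.0, 0.5) + 0.25   # cell centers along x: [0.25, 0.75]
y_vals = sp.arange(0, 1.0, 0.5) + 0.25   # cell centers along y
xs, ys = sp.meshgrid(x_vals, y_vals)
grid_cells = sp.vstack(sp.dstack((xs, ys)))
print(grid_cells)   # one (x, y) row per grid cell, 4 rows in total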
Example #9
def square_lattice(L):
    '''
    Generates an array of points making up a square LxL lattice.
    
    :param L: the dimension of the square lattice.
    '''
    x = sp.linspace(0, L, L + 1)
    coord_arrays = sp.meshgrid(x, x)
    polygon = sp.dstack(coord_arrays)
    polygon = sp.reshape(polygon, ((L + 1)**2, 2))
    return polygon
Example #10
    def __init__(self,
                 loc1,
                 loc2,
                 scale1,
                 scale2,
                 xmin,
                 xmax,
                 npts=100,
                 plot=False):

        #Sample space for plotting and interpolating
        x_eval_space = sp.linspace(xmin, xmax, npts)
        y_eval_space = sp.linspace(xmin, xmax, npts)
        if plot: print('Done with linspace')
        x_eval, y_eval = sp.meshgrid(x_eval_space, y_eval_space)
        xy_eval = sp.dstack((x_eval, y_eval))
        if plot: print('Done with dstack')
        #Create a bimodal pdf
        bimodal_pdf = pdf(xy_eval, mean=loc1, cov=scale1)*0.5 + \
                      pdf(xy_eval, mean=loc2, cov=scale2)*0.5
        if plot: print('Done with pdf')
        bimodal_cdf = cdf(xy_eval, mean=loc1, cov=scale1)*0.5 + \
                      cdf(xy_eval, mean=loc2, cov=scale2)*0.5
        if plot: print('Done with cdf')

        #Make sure the cdf is bounded before interpolating the inverse
        bimodal_cdf[-1, -1] = 1
        bimodal_cdf[-1, 0] = 0
        self.ppfx = interpolate.interp1d(bimodal_cdf[-1, :], x_eval_space)
        self.ppfix = interpolate.interp1d(x_eval_space, sp.arange(npts))
        if plot: print('Done building interpolator')

        #Store the data
        self.x_eval = x_eval
        self.y_eval = y_eval
        self.x_eval_space = x_eval_space
        self.y_eval_space = y_eval_space
        self.bimodal_pdf = bimodal_pdf
        self.bimodal_cdf = bimodal_cdf
        return
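The pdf/cdf callables above are assumed to be bound elsewhere; a minimal sketch of the same grid evaluation using scipy.stats.multivariate_normal with hypothetical parameter values:

import scipy as sp
from scipy.stats import multivariate_normal

x_eval_space = sp.linspace(-5, 5, 100)
x_eval, y_eval = sp.meshgrid(x_eval_space, x_eval_space)
xy_eval = sp.dstack((x_eval, y_eval))   # shape (100, 100, 2)

pdf = multivariate_normal.pdf
bimodal_pdf = pdf(xy_eval, mean=[-2, 0], cov=1.0) * 0.5 + \
              pdf(xy_eval, mean=[2, 0], cov=1.0) * 0.5
print(bimodal_pdf.shape)                # (100, 100)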
Example #11
    def _compare_evolutions(self,
                            evolution1,
                            evolution2,
                            *,
                            primary_to_secondary=False,
                            flip=True,
                            min_age=-scipy.inf,
                            max_age=scipy.inf):
        """
        Require the two evolutions match for a, and primary Lconv & Lrad.

        Args:
            evolution1:    The first of the two evolutions to compare.

            evolution2:    The second of the two evolutions to compare.

            primary_to_secondary:    If False compares the evolution of the
                primary object in both evolutions, otherwise compares the
                evolution of the primary in evolution1 to the evolution of the
                secondary in evolution2.

            flip:    If true, also calls itself with the two evolutions swapped.

            min_age:    Discrepancies at ages before this are ignored.

            max_age:    Discrepancies at ages after this are ignored.
        """

        def output_failing(data):
            """Output to stdout the discrepant evolutions."""

            for i in range(max(d.shape[0] for d in data)):
                print(
                    (
                        '%25.16e %25.16e' % tuple(data[0][i])
                        if i < data[0].shape[0] else
                        (51 * ' ')
                    )
                    +
                    ' '
                    +
                    (
                        '%25.16e %25.16e' % tuple(data[1][i])
                        if i < data[1].shape[0] else
                        (51 * ' ')
                    )
                )


        for quantity_name in [
                ('semimajor', 'semimajor'),
                ('envelope_angmom', 'primary_envelope_angmom'),
                ('core_angmom', 'primary_core_angmom')
        ]:
            with self.subTest(quantity=quantity_name[0], flipped=not flip):
                if primary_to_secondary:
                    quantity = [
                        getattr(
                            evolution1, quantity_name[0],
                            getattr(evolution1, quantity_name[1], None)
                        ),
                        getattr(
                            evolution2,
                            (
                                (
                                    '' if quantity_name[0] == 'semimajor'
                                    else 'secondary_'
                                )
                                +
                                quantity_name[0]
                            )
                        )
                    ]
                else:
                    quantity = [
                        getattr(evol, quantity_name[0],
                                getattr(evol, quantity_name[1], None))
                        for evol in [evolution1, evolution2]
                    ]
                age_within_range = [
                    scipy.logical_and(evol.age[:-1] > min_age,
                                      evol.age[:-1] < max_age)
                    for evol in [evolution1, evolution2]
                ]

                acceptable_ages = scipy.logical_and(
                    age_within_range[0],
                    scipy.logical_and(
                        (evolution1.age[:-1] - evolution1.age[1:]) < 0,
                        scipy.isfinite(quantity[0][:-1])
                    )
                )
                interp_quantity = InterpolatedUnivariateSpline(
                    evolution1.age[:-1][acceptable_ages],
                    quantity[0][:-1][acceptable_ages]
                )

                max_difference = scipy.nanmax(
                    scipy.fabs(
                        quantity[1][:-1][age_within_range[1]]
                        -
                        interp_quantity(evolution2.age[:-1][age_within_range[1]])
                    )
                )
                max_error = max(
                    (
                        scipy.nanmax(quantity[1][:-1][age_within_range[1]])
                        -
                        scipy.nanmin(quantity[1][:-1][age_within_range[1]])
                    ) * 5e-3,
                    1e-10 * scipy.nanmean(quantity[1][:-1][age_within_range[1]])
                )
                if max_difference > max_error:
                    output_failing(
                        [
                            scipy.dstack(
                                (
                                    evolution1.age[:-1][age_within_range[0]],
                                    quantity[0][:-1][age_within_range[0]]
                                )
                            )[0],
                            scipy.dstack(
                                (
                                    evolution2.age[:-1][age_within_range[1]],
                                    quantity[1][:-1][age_within_range[1]]
                                )
                            )[0]
                        ]
                    )
                self.assertLessEqual(max_difference, max_error)

        if flip:
            self._compare_evolutions(evolution2,
                                     evolution1,
                                     primary_to_secondary=primary_to_secondary,
                                     min_age=min_age,
                                     max_age=max_age,
                                     flip=False)
Example #12
File: verify.py Project: xtmgah/spladder
def verify_all_events(ev,
                      strain_idx=None,
                      list_bam=None,
                      event_type=None,
                      CFG=None,
                      out_fn=None):
    # (ev, counts) = verify_all_events(ev, strain_idx, list_bam, event_type, CFG) ;

    ### set parameters if called by rproc
    if strain_idx is None:
        PAR = ev
        ev = PAR['ev']
        strain_idx = PAR['strain_idx']
        list_bam = PAR['list_bam']
        if 'out_fn' in PAR:
            out_fn = PAR['out_fn']
        event_type = PAR['event_type']
        CFG = PAR['CFG']

    ### verify the events if demanded
    if CFG['verify_alt_events']:

        prune_tag = ''
        if CFG['do_prune']:
            prune_tag = '_pruned'
        validate_tag = ''
        if CFG['validate_splicegraphs']:
            validate_tag = '.validated'

        (genes, inserted) = cPickle.load(
            open('%s/spladder/genes_graph_conf%i.%s%s%s.pickle' %
                 (CFG['out_dirname'], CFG['confidence_level'],
                  CFG['merge_strategy'], validate_tag, prune_tag)))

        fn_count = '%s/spladder/genes_graph_conf%i.%s%s%s.count.pickle' % (
            CFG['out_dirname'], CFG['confidence_level'], CFG['merge_strategy'],
            validate_tag, prune_tag)
        ### load count index data from hdf5
        IN = h5py.File(fn_count, 'r')
        gene_ids_segs = IN['gene_ids_segs'][:]
        gene_ids_edges = IN['gene_ids_edges'][:]

        ### sort events by gene idx
        s_idx = sp.argsort([x.gene_idx for x in ev])
        ev = ev[s_idx]
        old_idx = sp.argsort(s_idx)

        ### find gene idx boundaries
        assert (isequal(gene_ids_segs, sp.sort(gene_ids_segs)))
        assert (isequal(gene_ids_edges, sp.sort(gene_ids_edges)))

        tmp, genes_f_idx_segs = sp.unique(gene_ids_segs, return_index=True)
        genes_l_idx_segs = sp.r_[genes_f_idx_segs[1:] - 1,
                                 gene_ids_segs.shape[0]]

        tmp, genes_f_idx_edges = sp.unique(gene_ids_edges, return_index=True)
        genes_l_idx_edges = sp.r_[genes_f_idx_edges[1:] - 1,
                                  gene_ids_edges.shape[0]]

        gr_idx_segs = 0
        gr_idx_edges = 0
        counts = []
        for i in range(ev.shape[0]):
            g_idx = ev[i].gene_idx

            ### there are no edges present in the event
            if gene_ids_edges.shape[0] == 0:
                ver, info = verify_empty(event_type)
                counts.append(sp.array([info]))
                ev[i].verified = sp.array(ev[i].verified)
                continue

            while gene_ids_segs[genes_f_idx_segs[gr_idx_segs]] < g_idx:
                gr_idx_segs += 1
            assert (gene_ids_segs[genes_f_idx_segs[gr_idx_segs]] == g_idx)

            while gene_ids_edges[genes_f_idx_edges[gr_idx_edges]] < g_idx:
                gr_idx_edges += 1
            if gr_idx_edges > g_idx:
                ver, info = verify_empty(event_type)
                counts.append(sp.array([info]))
                ev[i].verified = sp.array(ev[i].verified)
                continue
            assert (gene_ids_edges[genes_f_idx_edges[gr_idx_edges]] == g_idx)

            ### load relevant count data from HDF5
            segments = IN['segments'][
                genes_f_idx_segs[gr_idx_segs]:genes_l_idx_segs[gr_idx_segs] +
                1, strain_idx]
            seg_pos = IN['seg_pos'][
                genes_f_idx_segs[gr_idx_segs]:genes_l_idx_segs[gr_idx_segs] +
                1, strain_idx]
            edges = IN['edges'][genes_f_idx_edges[gr_idx_edges]:
                                genes_l_idx_edges[gr_idx_edges] + 1,
                                strain_idx]
            edge_idx = IN['edge_idx'][genes_f_idx_edges[gr_idx_edges]:
                                      genes_l_idx_edges[gr_idx_edges] + 1]

            for s_idx in range(len(strain_idx)):
                print '%i/%i\r' % (s_idx, len(strain_idx))
                # ev_tmp.subset_strain(s_idx) ### TODO
                if event_type == 'exon_skip':
                    ver, info = verify_exon_skip(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[edge_idx, edges[:, s_idx]], CFG)
                elif event_type in ['alt_3prime', 'alt_5prime']:
                    ver, info = verify_alt_prime(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[edge_idx, edges[:, s_idx]], CFG)
                elif event_type == 'intron_retention':
                    ver, info = verify_intron_retention(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[edge_idx,
                              edges[:, s_idx]], seg_pos[:, s_idx].T, CFG)
                elif event_type == 'mult_exon_skip':
                    ver, info = verify_mult_exon_skip(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[edge_idx, edges[:, s_idx]], CFG)
                elif event_type == 'mutex_exons':
                    ver, info = verify_mutex_exons(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[edge_idx, edges[:, s_idx]], CFG)

                ev[i].verified.append(ver)
                if s_idx == 0:
                    counts.append(sp.array([info]))
                else:
                    counts[-1] = sp.r_[counts[-1], sp.array([info])]
            ev[i].verified = sp.array(ev[i].verified, dtype='bool')

        IN.close()
        counts = sp.dstack(counts)
    ev = ev[old_idx]
    counts = counts[:, :, old_idx]

    if out_fn is not None:
        cPickle.dump((ev, counts), open(out_fn, 'w'))

    return (ev, counts)
Example #13
 def velocity_field(self):
     """Current calculated velocity field across simulated grid points."""
     return scipy.dstack((self._u_int, self._v_int))
Example #14
matpyDir = '/home/dell/Desktop/sarvaswa/objectness-release-v2.2/Trial_Pascal/testSet/exp1/salprop-v1.0/matpy/'
imgFile = matpyDir + 'imgFile.png'
colorTextureFile = matpyDir + 'colorTextureFile.mat'

#Loading Image and defining parameters
img = imread(imgFile)
imgLab = cvtColor(img, COLOR_BGR2LAB)
L = imgLab[:, :, 0]
sigma = 0.5

#TEXTURE FEATURES
#LoG Computation on L channel of LAB Image
LoG1 = nd.gaussian_laplace(L, sigma)
LoG2 = nd.gaussian_laplace(L, 2 * sigma)
LoG3 = nd.gaussian_laplace(L, 4 * sigma)
LoG = dstack((LoG1, LoG2, LoG3))
LoG = average(LoG, axis=2)

#Gaussian Filter Computation on L,A,B channels of LAB Image
G1 = nd.gaussian_filter(L, sigma)
G2 = nd.gaussian_filter(L, 2 * sigma)
G3 = nd.gaussian_filter(L, 4 * sigma)

#DoG Computation on L channel of LAB Image
DoG1 = G2 - G1
DoG2 = G3 - G2
DoG = dstack((DoG1, DoG2))
DoG = average(DoG, axis=2)

#Generate Features using above computed matrices
feat = dstack((DoG, LoG))
Example #15
def verify_all_events(ev,
                      strain_idx=None,
                      list_bam=None,
                      event_type=None,
                      CFG=None,
                      out_fn=None):
    # (ev, counts) = verify_all_events(ev, strain_idx, list_bam, event_type, CFG) ;

    ### set parameters if called by rproc
    if strain_idx is None:
        PAR = ev
        ev = PAR['ev']
        strain_idx = PAR['strain_idx']
        list_bam = PAR['list_bam']
        if 'out_fn' in PAR:
            out_fn = PAR['out_fn']
        event_type = PAR['event_type']
        CFG = PAR['CFG']

    ### verify the events if demanded
    if CFG['verify_alt_events']:

        prune_tag = ''
        if CFG['do_prune']:
            prune_tag = '_pruned'
        validate_tag = ''
        if CFG['validate_splicegraphs']:
            validate_tag = '.validated'

        (genes, inserted) = cPickle.load(
            open('%s/spladder/genes_graph_conf%i.%s%s%s.pickle' %
                 (CFG['out_dirname'], CFG['confidence_level'],
                  CFG['merge_strategy'], validate_tag, prune_tag)))

        fn_count = '%s/spladder/genes_graph_conf%i.%s%s%s.count.hdf5' % (
            CFG['out_dirname'], CFG['confidence_level'], CFG['merge_strategy'],
            validate_tag, prune_tag)
        ### load count index data from hdf5
        IN = h5py.File(fn_count, 'r')
        if os.path.exists(fn_count + '.quick_ids_segs'):
            gene_ids_segs = cPickle.load(
                open(fn_count + '.quick_ids_segs', 'r'))
        else:
            gene_ids_segs = IN['gene_ids_segs'][:]
        if os.path.exists(fn_count + '.quick_ids_edges'):
            gene_ids_edges = cPickle.load(
                open(fn_count + '.quick_ids_edges', 'r'))
        else:
            gene_ids_edges = IN['gene_ids_edges'][:]
        if os.path.exists(fn_count + '.quick_edge_idx'):
            edge_idx = cPickle.load(open(fn_count + '.quick_edge_idx', 'r'))
        else:
            edge_idx = IN['edge_idx'][:]

        ### sort events by gene idx
        s_idx = sp.argsort([x.gene_idx for x in ev])
        ev = ev[s_idx]
        old_idx = sp.argsort(s_idx)

        ### find gene idx boundaries

        counts = []
        for i in range(ev.shape[0]):
            g_idx = ev[i].gene_idx

            ### there are no edges present in the event
            if gene_ids_edges.shape[0] == 0:
                ver, info = verify_empty(event_type)
                counts.append(sp.array([info]))
                ev[i].verified = sp.array(ev[i].verified, dtype='bool')
                continue

            gr_idx_segs = sp.where(gene_ids_segs == g_idx)[0]
            gr_idx_edges = sp.where(gene_ids_edges == g_idx)[0]
            if gr_idx_edges.shape[0] == 0:
                ver, info = verify_empty(event_type)
                counts.append(sp.array([info]))
                ev[i].verified = sp.array(ev[i].verified, dtype='bool')
                continue

            ### load relevant count data from HDF5
            segments = sp.atleast_2d(
                IN['segments'][gr_idx_segs, :])[:, strain_idx]
            seg_pos = sp.atleast_2d(IN['seg_pos'][gr_idx_segs, :])[:,
                                                                   strain_idx]
            edges = sp.atleast_2d(IN['edges'][gr_idx_edges, :])[:, strain_idx]
            curr_edge_idx = edge_idx[gr_idx_edges]

            for s_idx in range(len(strain_idx)):
                #sys.stdout.write('.')
                #if s_idx > 0 and s_idx % 50 == 0:
                #    sys.stdout.write('%i\n' % s_idx)
                # ev_tmp.subset_strain(s_idx) ### TODO
                if event_type == 'exon_skip':
                    ver, info = verify_exon_skip(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[curr_edge_idx, edges[:, s_idx]], CFG)
                elif event_type in ['alt_3prime', 'alt_5prime']:
                    ver, info = verify_alt_prime(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[curr_edge_idx, edges[:, s_idx]], CFG)
                elif event_type == 'intron_retention':
                    ver, info = verify_intron_retention(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[curr_edge_idx,
                              edges[:, s_idx]], seg_pos[:, s_idx].T, CFG)
                elif event_type == 'mult_exon_skip':
                    ver, info = verify_mult_exon_skip(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[curr_edge_idx, edges[:, s_idx]], CFG)
                elif event_type == 'mutex_exons':
                    ver, info = verify_mutex_exons(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[curr_edge_idx, edges[:, s_idx]], CFG)

                ev[i].verified.append(ver)
                if s_idx == 0:
                    counts.append(sp.array([info]))
                else:
                    counts[-1] = sp.r_[counts[-1], sp.array([info])]
            ev[i].verified = sp.array(ev[i].verified, dtype='bool')

        IN.close()
        counts = sp.dstack(counts)
    ev = ev[old_idx]
    counts = counts[:, :, old_idx]

    if out_fn is not None:
        cPickle.dump((ev, counts), open(out_fn, 'w'), -1)

    return (ev, counts)
Example #16
              sp.shape(Velocity1L_t)[1],
              axis=1), [t for t in range(2,
                                         sp.shape(Velocity1L_t)[1])], 1)
E_i_val = d_test['arr_4'][:numSamples]
E_f_val = d_test['arr_5'][:numSamples]
p_x_i_val = d_test['arr_6'][:numSamples]
p_x_f_val = d_test['arr_7'][:numSamples]
p_y_i_val = d_test['arr_8'][:numSamples]
p_y_f_val = d_test['arr_9'][:numSamples]

#===============================================================================================================
#   Collect Input and Output for Network
#===============================================================================================================

# Dimensions of (samples, timesteps, features)
input_Arr = sp.dstack(
    (Position1L_firstSecond, Velocity1L_firstSecond, dt_Arr))[:, 0, :]
target_Arr = sp.dstack((Position1L_firstSecond, Velocity1L_firstSecond))[:,
                                                                         1, :]
input_Arr_val = sp.dstack(
    (Position1L_firstSecond_val, Velocity1L_firstSecond_val, dt_Arr_val))[:,
                                                                          0, :]
target_Arr_val = sp.dstack(
    (Position1L_firstSecond_val, Velocity1L_firstSecond_val))[:, 1, :]

#===============================================================================================================
#   Network
#===============================================================================================================

# Training existing model. Comment out if you do not wish to do this.
# model = load_model("trainedModel_temp.hd5")
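The input/target slicing above relies on dstack placing each stacked array along a new last axis; a small shape check with illustrative stand-in arrays:

import scipy as sp
pos = sp.zeros((1000, 2))        # stand-in: position at timesteps 0 and 1
vel = sp.ones((1000, 2))         # stand-in: velocity at timesteps 0 and 1
packed = sp.dstack((pos, vel))   # shape (1000, 2, 2)
inputs = packed[:, 0, :]         # timestep-0 slice -> network input
targets = packed[:, 1, :]        # timestep-1 slice -> network target
print(packed.shape, inputs.shape, targets.shape)   # (1000, 2, 2) (1000, 2) (1000, 2)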
Example #17
is_candidate = sp.zeros(is_candidate.shape[0], dtype='bool')
is_candidate[keep_idx] = 1
pickle.dump(is_candidate, open(candidate_out_step3, 'wb'), -1)

### report found candidates without checking for nearby variants
event_pos = sp.array([EV['event_pos'][i, :] for i in keep_idx])
gene_idx = sp.array([EV['gene_idx'][i] for i in keep_idx], dtype='int')
gene_strand = sp.array([EV['gene_strand'][i] for i in gene_idx])
gene_ids = sp.array([EV['gene_names'][i] for i in gene_idx])
gene_names = sp.array([un.get_ID(_, lookup=lookup) for _ in gene_ids])
is_coding = sp.array(
    ['coding' if _ in coding_genes else 'non-coding' for _ in gene_ids])
event_chr = sp.array([EV['gene_chr'][i] for i in gene_idx])

### number of candidates per gene
gene_mult_dict = dict(sp.dstack(sp.unique(gene_ids, return_counts=True))[0, :])
gene_mult = sp.array([gene_mult_dict[_] for _ in gene_ids])

s_idx = sp.argsort(delta_psi)[::-1]
#           0         1          2--7       8         9           10         11           12         13        14       15        16            17         18              19
res = sp.c_[keep_idx, event_chr, event_pos, gene_ids, gene_names, delta_psi,
            gene_strand, is_coding, cov_mean, cov_max, affected, non_affected,
            gene_mult, affected_files, non_affected_files, affected_donors,
            non_affected_donors][s_idx, :]
sp.savetxt(os.path.join(BASEDIR_AS, 'alternative_splicing',
                        'exonization_candidates_C2.txt'),
           res,
           fmt='%s',
           delimiter='\t')

### continue overlapping to variants in a nearby window
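The gene_mult_dict line above builds an id -> count mapping by dstack-ing the two arrays returned by sp.unique(..., return_counts=True); the idiom in isolation, with toy data:

import scipy as sp
ids = sp.array(['g1', 'g2', 'g1', 'g3', 'g1'])
pairs = sp.dstack(sp.unique(ids, return_counts=True))[0, :]
gene_mult_dict = dict(pairs)
print(gene_mult_dict)   # {'g1': '3', 'g2': '1', 'g3': '1'} -- counts coerced to str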
Example #18
def verify_all_events(ev, strain_idx=None, list_bam=None, event_type=None, CFG=None, out_fn=None):
    # (ev, counts) = verify_all_events(ev, strain_idx, list_bam, event_type, CFG) ;

    ### set parameters if called by rproc
    if strain_idx is None:
        PAR = ev
        ev = PAR["ev"]
        strain_idx = PAR["strain_idx"]
        list_bam = PAR["list_bam"]
        if "out_fn" in PAR:
            out_fn = PAR["out_fn"]
        event_type = PAR["event_type"]
        CFG = PAR["CFG"]

    ### verify the events if demanded
    if CFG["verify_alt_events"]:

        prune_tag = ""
        if CFG["do_prune"]:
            prune_tag = "_pruned"
        validate_tag = ""
        if CFG["validate_splicegraphs"]:
            validate_tag = ".validated"

        if CFG["merge_strategy"] == "single":
            (genes, inserted) = cPickle.load(
                open(
                    "%s/spladder/genes_graph_conf%i.%s%s%s.pickle"
                    % (CFG["out_dirname"], CFG["confidence_level"], CFG["samples"][strain_idx], validate_tag, prune_tag)
                )
            )
            fn_count = "%s/spladder/genes_graph_conf%i.%s%s%s.count.hdf5" % (
                CFG["out_dirname"],
                CFG["confidence_level"],
                CFG["samples"][strain_idx],
                validate_tag,
                prune_tag,
            )
        else:
            (genes, inserted) = cPickle.load(
                open(
                    "%s/spladder/genes_graph_conf%i.%s%s%s.pickle"
                    % (CFG["out_dirname"], CFG["confidence_level"], CFG["merge_strategy"], validate_tag, prune_tag)
                )
            )
            fn_count = "%s/spladder/genes_graph_conf%i.%s%s%s.count.hdf5" % (
                CFG["out_dirname"],
                CFG["confidence_level"],
                CFG["merge_strategy"],
                validate_tag,
                prune_tag,
            )

        ### load count index data from hdf5
        IN = h5py.File(fn_count, "r")
        if os.path.exists(fn_count + ".quick_ids_segs"):
            gene_ids_segs = cPickle.load(open(fn_count + ".quick_ids_segs", "r"))
        else:
            gene_ids_segs = IN["gene_ids_segs"][:]
        if os.path.exists(fn_count + ".quick_ids_edges"):
            gene_ids_edges = cPickle.load(open(fn_count + ".quick_ids_edges", "r"))
        else:
            gene_ids_edges = IN["gene_ids_edges"][:]
        if os.path.exists(fn_count + ".quick_edge_idx"):
            edge_idx = cPickle.load(open(fn_count + ".quick_edge_idx", "r"))
        else:
            edge_idx = IN["edge_idx"][:]

        ### sort events by gene idx
        s_idx = sp.argsort([x.gene_idx for x in ev])
        ev = ev[s_idx]
        old_idx = sp.argsort(s_idx)

        counts = []
        for i in range(ev.shape[0]):

            # sys.stdout.write('.')
            # if i > 0 and i % 50 == 0:
            #    sys.stdout.write('%i (%i)\n' % (i, ev.shape[0]))
            # sys.stdout.flush()

            g_idx = ev[i].gene_idx
            ev[i].verified = []  ### TODO: maybe solve that differently

            ### there are no edges present in the event
            if gene_ids_edges.shape[0] == 0:
                ver, info = verify_empty(event_type)
                counts.append(sp.array([info]))
                ev[i].verified = sp.array(ev[i].verified, dtype="bool")
                continue

            gr_idx_segs = sp.where(gene_ids_segs == g_idx)[0]
            gr_idx_edges = sp.where(gene_ids_edges == g_idx)[0]
            if gr_idx_edges.shape[0] == 0:
                ver, info = verify_empty(event_type)
                counts.append(sp.array([info]))
                ev[i].verified = sp.array(ev[i].verified, dtype="bool")
                continue

            ### load relevant count data from HDF5
            segments = sp.atleast_2d(IN["segments"][gr_idx_segs, :])[:, strain_idx]
            seg_pos = sp.atleast_2d(IN["seg_pos"][gr_idx_segs, :])[:, strain_idx]
            edges = sp.atleast_2d(IN["edges"][gr_idx_edges, :])[:, strain_idx]
            curr_edge_idx = edge_idx[gr_idx_edges]

            for s_idx in range(len(strain_idx)):
                # sys.stdout.write('.')
                # if s_idx > 0 and s_idx % 50 == 0:
                #    sys.stdout.write('%i (%i)\n' % (s_idx, len(strain_idx)))
                # ev_tmp.subset_strain(s_idx) ### TODO
                # sys.stdout.flush()
                if event_type == "exon_skip":
                    ver, info = verify_exon_skip(
                        ev[i], genes[g_idx], segments[:, s_idx].T, sp.c_[curr_edge_idx, edges[:, s_idx]], CFG
                    )
                elif event_type in ["alt_3prime", "alt_5prime"]:
                    ver, info = verify_alt_prime(
                        ev[i], genes[g_idx], segments[:, s_idx].T, sp.c_[curr_edge_idx, edges[:, s_idx]], CFG
                    )
                elif event_type == "intron_retention":
                    ver, info = verify_intron_retention(
                        ev[i],
                        genes[g_idx],
                        segments[:, s_idx].T,
                        sp.c_[curr_edge_idx, edges[:, s_idx]],
                        seg_pos[:, s_idx].T,
                        CFG,
                    )
                elif event_type == "mult_exon_skip":
                    ver, info = verify_mult_exon_skip(
                        ev[i], genes[g_idx], segments[:, s_idx].T, sp.c_[curr_edge_idx, edges[:, s_idx]], CFG
                    )
                elif event_type == "mutex_exons":
                    ver, info = verify_mutex_exons(
                        ev[i], genes[g_idx], segments[:, s_idx].T, sp.c_[curr_edge_idx, edges[:, s_idx]], CFG
                    )

                ev[i].verified.append(ver)
                if s_idx == 0:
                    counts.append(sp.array([info]))
                else:
                    counts[-1] = sp.r_[counts[-1], sp.array([info])]
            ev[i].verified = sp.array(ev[i].verified, dtype="bool")

        IN.close()
        counts = sp.dstack(counts)
    ev = ev[old_idx]
    counts = counts[:, :, old_idx]

    if out_fn is not None:
        cPickle.dump((ev, counts), open(out_fn, "w"), -1)

    return (ev, counts)
Example #19
File: weave.py Project: vishwa91/tests
def no_weave(a):
    b = sp.dstack(a)
    return b.sum(axis=2)
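Since dstack stacks along a new third axis, summing over axis=2 makes no_weave an elementwise sum of the input arrays; a toy check (assuming import scipy as sp, as the file does):

m1 = sp.array([[1, 2], [3, 4]])
m2 = sp.array([[10, 20], [30, 40]])
print(no_weave([m1, m2]))   # [[11 22] [33 44]]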
Example #20
def verify_all_events(ev, strain_idx=None, list_bam=None, event_type=None, options=None, out_fn=None):

    ### set parameters if called by rproc
    if strain_idx is None:
        PAR = ev
        ev = PAR['ev']
        strain_idx = PAR['strain_idx']
        list_bam = PAR['list_bam']
        if 'out_fn' in PAR:
            out_fn = PAR['out_fn']
        event_type = PAR['event_type']
        options = PAR['options']

    ### verify the events if demanded
    if options.verify_alt_events:

        prune_tag = ''
        if options.do_prune:
            prune_tag = '_pruned'
        validate_tag = ''
        if options.validate_sg:
            validate_tag = '.validated'

        if options.merge == 'single':
            (genes, inserted) = pickle.load(open('%s/spladder/genes_graph_conf%i.%s%s%s.pickle' % (options.outdir, options.confidence, options.samples[strain_idx], validate_tag, prune_tag), 'rb'))
            fn_count = '%s/spladder/genes_graph_conf%i.%s%s%s.count.hdf5' % (options.outdir, options.confidence, options.samples[strain_idx], validate_tag, prune_tag)
        else:
            (genes, inserted) = pickle.load(open('%s/spladder/genes_graph_conf%i.%s%s%s.pickle' % (options.outdir, options.confidence, options.merge, validate_tag, prune_tag), 'rb'))
            fn_count = '%s/spladder/genes_graph_conf%i.%s%s%s.count.hdf5' % (options.outdir, options.confidence, options.merge, validate_tag, prune_tag)

        ### load count index data from hdf5
        IN = h5py.File(fn_count, 'r')
        if os.path.exists(fn_count + '.quick_ids_segs'):
            gene_ids_segs = pickle.load(open(fn_count + '.quick_ids_segs', 'rb'))
        else:
            gene_ids_segs = IN['gene_ids_segs'][:]
            pickle.dump(gene_ids_segs, open(fn_count + '.quick_ids_segs', 'wb'), -1)
        if os.path.exists(fn_count + '.quick_ids_edges'):
            gene_ids_edges = pickle.load(open(fn_count + '.quick_ids_edges', 'rb'))
        else:
            gene_ids_edges = IN['gene_ids_edges'][:]
            pickle.dump(gene_ids_edges, open(fn_count + '.quick_ids_edges', 'wb'), -1)
        if os.path.exists(fn_count + '.quick_edge_idx'):
            edge_idx = pickle.load(open(fn_count + '.quick_edge_idx', 'rb'))
        else:
            edge_idx = IN['edge_idx'][:]
            pickle.dump(edge_idx, open(fn_count + '.quick_edge_idx', 'wb'), -1)

        ### sort events by gene idx
        s_idx = sp.argsort([x.gene_idx for x in ev])
        ev = ev[s_idx]
        old_idx = sp.argsort(s_idx)

        counts = []
        for i in range(ev.shape[0]):
            
            sys.stdout.write('.')
            if i > 0 and i % 50 == 0:
                sys.stdout.write('%i (%i)\n' % (i, ev.shape[0]))
            sys.stdout.flush()
        
            g_idx = ev[i].gene_idx
            ev[i].verified = [] ### TODO: maybe solve that differently

            ### there are no edges present in the event
            if gene_ids_edges.shape[0] == 0:
                ver, info = verify_empty(event_type)
                counts.append(sp.array([info]))
                ev[i].verified = sp.array(ev[i].verified, dtype='bool')
                continue

            gr_idx_segs = sp.where(gene_ids_segs == g_idx)[0]
            gr_idx_edges = sp.where(gene_ids_edges == g_idx)[0]
            if gr_idx_edges.shape[0] == 0:
                ver, info = verify_empty(event_type)
                counts.append(sp.array([info]))
                ev[i].verified = sp.array(ev[i].verified, dtype='bool')
                continue

            if isinstance(strain_idx, int):
                strain_idx = [strain_idx]

            ### load relevant count data from HDF5
            segments = sp.atleast_2d(IN['segments'][gr_idx_segs, :])[:, strain_idx]
            seg_pos = sp.atleast_2d(IN['seg_pos'][gr_idx_segs, :])[:, strain_idx]
            edges = sp.atleast_2d(IN['edges'][gr_idx_edges, :])[:, strain_idx]
            curr_edge_idx = edge_idx[gr_idx_edges]

            for s_idx in range(len(strain_idx)):
                #if s_idx > 0 and s_idx % 50 == 0:
                #    sys.stdout.write('%i (%i)\n' % (s_idx, len(strain_idx)))
               # ev_tmp.subset_strain(s_idx) ### TODO 
                #sys.stdout.flush()
                if event_type == 'exon_skip':
                    ver, info = verify_exon_skip(ev[i], genes[g_idx], segments[:, s_idx].T,  sp.c_[curr_edge_idx, edges[:, s_idx]], options)
                elif event_type in ['alt_3prime', 'alt_5prime']:
                    ver, info = verify_alt_prime(ev[i], genes[g_idx], segments[:, s_idx].T,  sp.c_[curr_edge_idx, edges[:, s_idx]], options)
                elif event_type == 'intron_retention':
                    ver, info = verify_intron_retention(ev[i], genes[g_idx], segments[:, s_idx].T,  sp.c_[curr_edge_idx, edges[:, s_idx]], seg_pos[:, s_idx].T, options)
                elif event_type == 'mult_exon_skip':
                    ver, info = verify_mult_exon_skip(ev[i], genes[g_idx], segments[:, s_idx].T,  sp.c_[curr_edge_idx, edges[:, s_idx]], options)
                elif event_type == 'mutex_exons':
                    ver, info = verify_mutex_exons(ev[i], genes[g_idx], segments[:, s_idx].T,  sp.c_[curr_edge_idx, edges[:, s_idx]], options)

                ev[i].verified.append(ver)
                if s_idx == 0:
                    counts.append(sp.array([info], dtype='float'))
                else:
                    counts[-1] = sp.r_[counts[-1], sp.array([info], dtype='float')]
            ev[i].verified = sp.array(ev[i].verified, dtype='bool')

        IN.close()
        counts = sp.dstack(counts)
    ev = ev[old_idx]
    counts = counts[:, :, old_idx]

    if out_fn is not None:
        pickle.dump((ev, counts), open(out_fn, 'wb'), -1)

    return (ev, counts)
Example #21
    def __init__(self,coordlist,paramlist,times = None,sensor_loc = sp.zeros(3),ver =0,coordvecs =
            None,paramnames=None,species=None,velocity=None):
        """ This constructor function will use create an instance of the IonoContainer class
        using either cartisian or spherical coordinates depending on which ever the user prefers.
        Inputs:
        coordlist - Nx3 Numpy array where N is the number of coordinates.
        paramlist - NxTxP Numpy array where T is the number of times and P is the number of parameters
                    alternatively it could be NxP if there is only one time instance.
        times - A T length numpy array where T is the number of times.  This is
                optional input, if not given then its just a numpy array of 0-T
        sensor_loc - A numpy array of length 3 that gives the sensor location.
                    The default value is [0,0,0] in cartisian space.
        ver - (Optional) If 0 the coordlist is in Cartisian coordinates if 1 then
        coordlist is a spherical coordinates.
        coordvecs - (Optional) A dictionary that holds the individual coordinate vectors.
        if sphereical coordinates keys are 'r','theta','phi' if cartisian 'x','y','z'.
        paramnames - This is a list or number numpy array of numbers for each parameter in the
        """
        r2d = 180.0/np.pi
        d2r = np.pi/180.0
        # Set up the size for the time vector if its not given.
        Ndims = paramlist.ndim
        psizetup = paramlist.shape
        if times is None:
            if Ndims==3:
                times = np.arange(psizetup[1])
            else:
                times = np.arange(1)
        if Ndims==2:
            paramlist = paramlist[:,np.newaxis,:]

        # Assume that the

        if ver==0:

            X_vec = coordlist[:,0]
            Y_vec = coordlist[:,1]
            Z_vec = coordlist[:,2]

            R_vec = sp.sqrt(X_vec**2+Y_vec**2+Z_vec**2)
            Az_vec = sp.arctan2(X_vec,Y_vec)*r2d
            El_vec = sp.arcsin(Z_vec/R_vec)*r2d

            self.Cart_Coords = coordlist
            self.Sphere_Coords = sp.array([R_vec,Az_vec,El_vec]).transpose()
            if coordvecs is not None:
                if set(coordvecs)!={'x','y','z'}:
                    raise NameError("Keys for coordvecs need to be 'x','y','z' ")
            else:
                coordvecs = ['x','y','z']

        elif ver==1:
            R_vec = coordlist[:,0]
            Az_vec = coordlist[:,1]
            El_vec = coordlist[:,2]

            xvecmult = np.sin(Az_vec*d2r)*np.cos(El_vec*d2r)
            yvecmult = np.cos(Az_vec*d2r)*np.cos(El_vec*d2r)
            zvecmult = np.sin(El_vec*d2r)
            X_vec = R_vec*xvecmult
            Y_vec = R_vec*yvecmult
            Z_vec = R_vec*zvecmult

            self.Cart_Coords = sp.column_stack((X_vec,Y_vec,Z_vec))
            self.Sphere_Coords = coordlist
            if coordvecs is not None:
                if set(coordvecs)!={'r','theta','phi'}:
                    raise NameError("Keys for coordvecs need to be 'r','theta','phi' ")
            else:
                coordvecs = ['r','theta','phi']
        # used to deal with the change in the files
        if type(coordvecs)==np.ndarray:
            coordvecs = [str(ic) for ic in coordvecs]

        self.Param_List = paramlist
        self.Time_Vector = times
        self.Coord_Vecs = coordvecs
        self.Sensor_loc = sensor_loc
        self.Species = species
        (Nloc,Nt) = paramlist.shape[:2]
        #set up a Velocity measurement
        if velocity is None:
            self.Velocity=sp.zeros((Nloc,Nt,3))
        else:
            # if in sperical coordinates and you have a velocity
            if velocity.ndim ==2 and ver==1:
                veltup = (velocity*sp.tile(xvecmult[:,sp.newaxis],(1,Nt)),
                          velocity*sp.tile(yvecmult[:,sp.newaxis],(1,Nt)),
                        velocity*sp.tile(zvecmult[:,sp.newaxis],(1,Nt)))
                self.Velocity=  sp.dstack(veltup)
            else:
                self.Velocity=velocity
        # set up a params name
        if paramnames is None:
            partparam = paramlist.shape[2:]
            if species is not None:
                paramnames = [['Ni_'+isp,'Ti_'+isp] for isp in species[:-1]]
                paramnames.append(['Ne','Te'])
                self.Param_Names=sp.array(paramnames,dtype=str)
            else:

                paramnums = np.arange(np.product(partparam))
                self.Param_Names = np.reshape(paramnums,partparam)
        else:
            self.Param_Names = paramnames
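A quick numeric check of the ver == 1 spherical-to-Cartesian conversion above (note the convention: sin of azimuth for x, cos for y; the values are arbitrary):

import numpy as np
d2r = np.pi / 180.0
r, az, el = 100.0, 30.0, 45.0
x = r * np.sin(az * d2r) * np.cos(el * d2r)   # ~35.36
y = r * np.cos(az * d2r) * np.cos(el * d2r)   # ~61.24
z = r * np.sin(el * d2r)                      # ~70.71
print(x, y, z)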
Example #22
def distance(a, b):
    # Returns the Euclidean distance from point a to point b. Takes length-3
    # sequences of X, Y, Z in Cartesian coordinates; note that b must support
    # unary negation (e.g. a NumPy array), since -b is evaluated before stacking.
    subtrArray = sp.dstack([a, -b]).sum(2)
    return sp.sqrt(sp.sum([n**2 for n in subtrArray]))
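For example, the diagonal of the unit cube (arrays rather than tuples, since -b must be well-defined; assumes import scipy as sp):

a = sp.array([0.0, 0.0, 0.0])
b = sp.array([1.0, 1.0, 1.0])
print(distance(a, b))   # sqrt(3) ~ 1.7320508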
Example #23
def quantify_from_graph(ev, strain_idx=None, event_type=None, CFG=None, out_fn=None, fn_merge=None):
    # cov = quantify_from_graph(ev, strain_idx, event_type, CFG, out_fn)

    ### set parameters if called by rproc
    if strain_idx is None:
        PAR = ev
        ev = PAR['ev']
        strain_idx = PAR['strain_idx']
        if 'out_fn' in PAR:
            out_fn = PAR['out_fn']
        event_type = PAR['event_type']
        CFG = PAR['CFG']

    if fn_merge is None:
        fn_merge = get_filename('fn_out_merge_val', CFG)

    if CFG['is_matlab']:
        genes = scio.loadmat(fn_merge, struct_as_record=False)['genes'][0, :]
        fn_count = fn_merge.replace('mat', 'count.mat')
    else:
        genes = cPickle.load(open(fn_merge, 'r'))[0]
        fn_count = fn_merge.replace('pickle', 'count.hdf5')

    ### load count index data from hdf5
    IN = h5py.File(fn_count, 'r')
    gene_ids_segs = IN['gene_ids_segs'][:].astype('int')
    gene_ids_edges = IN['gene_ids_edges'][:].astype('int')
    if len(gene_ids_segs.shape) > 1:
        gene_ids_segs = gene_ids_segs[0, :]
    if len(gene_ids_edges.shape) > 1:
        gene_ids_edges = gene_ids_edges[0, :]

    ### sort events by gene idx
    s_idx = sp.argsort([x.gene_idx for x in ev])
    ev = ev[s_idx]
    old_idx = sp.argsort(s_idx)

    ### find gene idx boundaries
    assert(isequal(gene_ids_segs, sp.sort(gene_ids_segs)))
    assert(isequal(gene_ids_edges, sp.sort(gene_ids_edges)))

    tmp, genes_f_idx_segs = sp.unique(gene_ids_segs, return_index=True)
    genes_l_idx_segs = sp.r_[genes_f_idx_segs[1:] - 1, gene_ids_segs.shape[0]]

    tmp, genes_f_idx_edges = sp.unique(gene_ids_edges, return_index=True)
    genes_l_idx_edges = sp.r_[genes_f_idx_edges[1:] - 1, gene_ids_edges.shape[0]]

    gr_idx_segs = 0
    gr_idx_edges = 0
    counts = []
    for i in range(ev.shape[0]):
        sys.stdout.write('.')
        if i % 10 == 0:
            sys.stdout.write('%i\n' % i)
        sys.stdout.flush()
        if CFG['is_matlab']:
            offset = 1
        else:
            offset = 0
        g_idx = ev[i].gene_idx

        while gene_ids_segs[genes_f_idx_segs[gr_idx_segs]] < g_idx:
            gr_idx_segs += 1
        assert(gene_ids_segs[genes_f_idx_segs[gr_idx_segs]] == g_idx)

        while gene_ids_edges[genes_f_idx_edges[gr_idx_edges]] < g_idx:
            gr_idx_edges += 1
        assert(gene_ids_edges[genes_f_idx_edges[gr_idx_edges]] == g_idx)

        ### load relevant count data from HDF5
        if CFG['is_matlab']:
            segments = IN['segments'][strain_idx, genes_f_idx_segs[gr_idx_segs]:genes_l_idx_segs[gr_idx_segs] + 1].T
            seg_pos = IN['seg_pos'][strain_idx, genes_f_idx_segs[gr_idx_segs]:genes_l_idx_segs[gr_idx_segs] + 1].astype('int')
            edges = IN['edges'][strain_idx, genes_f_idx_edges[gr_idx_edges]:genes_l_idx_edges[gr_idx_edges] + 1].T
            edge_idx = IN['edge_idx'][0, genes_f_idx_edges[gr_idx_edges]:genes_l_idx_edges[gr_idx_edges] + 1].astype('int')
        else:
            segments = IN['segments'][genes_f_idx_segs[gr_idx_segs]:genes_l_idx_segs[gr_idx_segs]+1, strain_idx]
            seg_pos = IN['seg_pos'][genes_f_idx_segs[gr_idx_segs]:genes_l_idx_segs[gr_idx_segs]+1, strain_idx]
            edges = IN['edges'][genes_f_idx_edges[gr_idx_edges]:genes_l_idx_edges[gr_idx_edges]+1, strain_idx]
            edge_idx = IN['edge_idx'][genes_f_idx_edges[gr_idx_edges]:genes_l_idx_edges[gr_idx_edges]+1]

        for s_idx in range(len(strain_idx)):
            #print '%i/%i' % (s_idx, len(strain_idx))

            if event_type == 'exon_skip':
                cov = quantify_exon_skip(ev[i], genes[g_idx - offset], segments[:, s_idx].T,  sp.c_[edge_idx, edges[:, s_idx]], CFG)
            elif event_type in ['alt_3prime', 'alt_5prime']:
                cov = quantify_alt_prime(ev[i], genes[g_idx - offset], segments[:, s_idx].T,  sp.c_[edge_idx, edges[:, s_idx]], CFG)
            elif event_type == 'intron_retention':
                cov = quantify_intron_retention(ev[i], genes[g_idx - offset], segments[:, s_idx].T,  sp.c_[edge_idx, edges[:, s_idx]], seg_pos[:, s_idx].T, CFG)
            elif event_type == 'mult_exon_skip':
                cov = quantify_mult_exon_skip(ev[i], genes[g_idx - offset], segments[:, s_idx].T,  sp.c_[edge_idx, edges[:, s_idx]], CFG)
            elif event_type == 'mutex_exons':
                cov = quantify_mutex_exons(ev[i], genes[g_idx - offset], segments[:, s_idx].T,  sp.c_[edge_idx, edges[:, s_idx]], CFG)

            if s_idx == 0:
                counts.append(sp.array([cov]))
            else:
                counts[-1] = sp.r_[counts[-1], sp.array([cov])]
    IN.close()
    counts = sp.dstack(counts)

    ### re-sort by old idx
    ev = ev[old_idx]
    counts = counts[:, :, old_idx]

    if out_fn is not None:
        cPickle.dump((ev, counts), open(out_fn, 'w'))

    return (ev, counts)
Example #24
 def __init__(self, series1, series2, wave1, wave2, *args, **kwargs):
     series = sp.vstack((series1.ravel(), series2.ravel())).T
     wave = sp.dstack((wave1, wave2))
     WaveletTransform.__init__(self, series, wave, *args, **kwargs)
Example #25
def dsift(im,
            verbose=True,
            fast=True,
            sizes=[4, 6, 8, 10],
            step=2,
            color='rgb',
            floatdescriptors=False,
            magnif=6,
            windowsize=1.5,
            contrastthreshold=0.005):

    opts = Options(verbose, fast, sizes, step, color, floatdescriptors,
                   magnif, windowsize, contrastthreshold)
    dsiftOpts = DSiftOptions(opts)

    # make sure image is float, otherwise segfault
    im = array(im, 'float32')

    # Extract the features
    imageSize = shape(im)
    if im.ndim == 3:
        if imageSize[2] != 3:
            # "IndexError: tuple index out of range" if both if's are checked at the same time
            raise ValueError("Image data in unknown format/shape")
    if opts.color == 'gray':
        numChannels = 1
        if (im.ndim == 2):
            im = vl_rgb2gray(im)
    else:
        numChannels = 3
        if (im.ndim == 2):
            im = dstack([im, im, im])
        if opts.color == 'rgb':
            pass
        elif opts.color == 'opponent':
            # from https://github.com/vlfeat/vlfeat/blob/master/toolbox/sift/vl_phow.m
            # Note that the mean differs from the standard definition of opponent
            # space and is the regular intensity (for compatibility with
            # the contrast thresholding).
            # Note also that the mean is added back to the other two
            # components with small multipliers for monochromatic
            # regions.

            mu = 0.3 * im[:, :, 0] + 0.59 * im[:, :, 1] + 0.11 * im[:, :, 2]
            alpha = 0.01
            im = dstack([mu,
                         (im[:, :, 0] - im[:, :, 1]) / sqrt(2) + alpha * mu,
                         (im[:, :, 0] + im[:, :, 1] - 2 * im[:, :, 2]) / sqrt(6) + alpha * mu])
        else:
            raise ValueError('Color option ' + str(opts.color) + ' not recognized')
    if opts.verbose:
        pass  # placeholder; the verbose prints below were disabled in this port
        #print('{0}: color space: {1}'.format('vl_phow', opts.color))
        #print('{0}: image size: {1} x {2}'.format('vl_phow', imageSize[0], imageSize[1]))
        #print('{0}: sizes: [{1}]'.format('vl_phow', opts.sizes))

    frames_all = []
    descrs_all = []
    for size_of_spatial_bins in opts.sizes:
        # from https://github.com/vlfeat/vlfeat/blob/master/toolbox/sift/vl_phow.m
        # Recall from VL_DSIFT() that the first descriptor for scale SIZE has
        # center located at XC = XMIN + 3/2 SIZE (the Y coordinate is
        # similar). It is convenient to align the descriptors at different
        # scales so that they have the same geometric centers. For the
        # maximum size we pick XMIN = 1 and we get centers starting from
        # XC = 1 + 3/2 MAX(OPTS.SIZES). For any other scale we pick XMIN so
        # that XMIN + 3/2 SIZE = 1 + 3/2 MAX(OPTS.SIZES).
        # In practice, the offset must be integer ('bounds'), so the
        # alignment works properly only if all OPTS.SIZES are even or odd.

        off = floor(3.0 / 2 * (max(opts.sizes) - size_of_spatial_bins)) + 1

        # smooth the image to the appropriate scale based on the size
        # of the SIFT bins
        sigma = size_of_spatial_bins / float(opts.magnif)
        ims = vl_imsmooth(im, sigma)

        # extract dense SIFT features from all channels
        frames = []
        descrs = []
        for k in range(numChannels):
            size_of_spatial_bins = int(size_of_spatial_bins)
            # vl_dsift does not accept numpy.int64 or similar
            f_temp, d_temp = vl_dsift(data=ims[:, :, k],
                                      step=dsiftOpts.step,
                                      size=size_of_spatial_bins,
                                      fast=dsiftOpts.fast,
                                      verbose=dsiftOpts.verbose,
                                      norm=dsiftOpts.norm,
                                      bounds=[off, off, maxint, maxint])
            frames.append(f_temp)
            descrs.append(d_temp)
        frames = array(frames)
        descrs = array(descrs)
        d_new_shape = [descrs.shape[0] * descrs.shape[1], descrs.shape[2]]
        descrs = descrs.reshape(d_new_shape)
        # remove low contrast descriptors
        # note that for color descriptors the V component is
        # thresholded
        if (opts.color == 'gray') | (opts.color == 'opponent'):
            contrast = frames[0][2, :]
        elif opts.color == 'rgb':
            contrast = mean([frames[0][2, :], frames[1][2, :], frames[2][2, :]], 0)
        else:
            raise ValueError('Color option ' + str(opts.color) + ' not recognized')
        descrs[:, contrast < opts.contrastthreshold] = 0

        # save only x,y, and the scale
        frames_temp = array(frames[0][0:3, :])
        padding = array(size_of_spatial_bins * ones(frames[0][0].shape))
        frames_all.append(vstack([frames_temp, padding]))
        descrs_all.append(array(descrs))


    frames_all = hstack(frames_all)
    descrs_all = hstack(descrs_all)
    return frames_all, descrs_all
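A quick numeric check of the center-alignment arithmetic described in the comment above (a standalone sketch, standard library only):

from math import floor

# With XMIN = off, the first descriptor center at each scale is
# XC = off + 3/2 * SIZE; for sizes [4, 6, 8, 10] every scale should share
# the common center 1 + 3/2 * max(sizes) = 16.
sizes = [4, 6, 8, 10]
for size in sizes:
    off = floor(3.0 / 2 * (max(sizes) - size)) + 1
    print(size, off, off + 3.0 / 2 * size)  # last column prints 16.0 for every size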
예제 #26
0
    def __init__(self,
                 coordlist,
                 paramlist,
                 times=None,
                 sensor_loc=sp.zeros(3),
                 ver=0,
                 coordvecs=None,
                 paramnames=None,
                 species=None,
                 velocity=None):
        """ This constructor function will use create an instance of the IonoContainer class
        using either cartisian or spherical coordinates depending on which ever the user prefers.
        Inputs:
        coordlist - Nx3 Numpy array where N is the number of coordinates.
        paramlist - NxTxP Numpy array where T is the number of times and P is the number of parameters
                    alternatively it could be NxP if there is only one time instance.
        times - A T length numpy array where T is the number of times.  This is
                optional input, if not given then its just a numpy array of 0-T
        sensor_loc - A numpy array of length 3 that gives the sensor location.
                    The default value is [0,0,0] in cartisian space.
        ver - (Optional) If 0 the coordlist is in Cartisian coordinates if 1 then
        coordlist is a spherical coordinates.
        coordvecs - (Optional) A dictionary that holds the individual coordinate vectors.
        if sphereical coordinates keys are 'r','theta','phi' if cartisian 'x','y','z'.
        paramnames - This is a list or number numpy array of numbers for each parameter in the
        """
        r2d = 180.0 / np.pi
        d2r = np.pi / 180.0
        # Set up the size for the time vector if its not given.
        Ndims = paramlist.ndim
        psizetup = paramlist.shape
        if times is None:
            if Ndims == 3:
                times = np.arange(psizetup[1])
            else:
                times = np.arange(1)
        if Ndims == 2:
            paramlist = paramlist[:, np.newaxis, :]

        # Depending on ver, derive the other coordinate system from coordlist.

        if ver == 0:

            X_vec = coordlist[:, 0]
            Y_vec = coordlist[:, 1]
            Z_vec = coordlist[:, 2]

            R_vec = sp.sqrt(X_vec**2 + Y_vec**2 + Z_vec**2)
            Az_vec = sp.arctan2(X_vec, Y_vec) * r2d
            El_vec = sp.arcsin(Z_vec / R_vec) * r2d

            self.Cart_Coords = coordlist
            self.Sphere_Coords = sp.array([R_vec, Az_vec, El_vec]).transpose()
            if coordvecs is not None:
                if set(coordvecs) != {'x', 'y', 'z'}:
                    raise NameError(
                        "Keys for coordvecs need to be 'x','y','z' ")
            else:
                coordvecs = ['x', 'y', 'z']

        elif ver == 1:
            R_vec = coordlist[:, 0]
            Az_vec = coordlist[:, 1]
            El_vec = coordlist[:, 2]

            xvecmult = np.sin(Az_vec * d2r) * np.cos(El_vec * d2r)
            yvecmult = np.cos(Az_vec * d2r) * np.cos(El_vec * d2r)
            zvecmult = np.sin(El_vec * d2r)
            X_vec = R_vec * xvecmult
            Y_vec = R_vec * yvecmult
            Z_vec = R_vec * zvecmult

            self.Cart_Coords = sp.column_stack((X_vec, Y_vec, Z_vec))
            self.Sphere_Coords = coordlist
            if coordvecs is not None:
                if set(coordvecs) != {'r', 'theta', 'phi'}:
                    raise NameError(
                        "Keys for coordvecs need to be 'r','theta','phi' ")
            else:
                coordvecs = ['r', 'theta', 'phi']
        # handle older files where coordvecs was stored as an ndarray
        if type(coordvecs) == np.ndarray:
            coordvecs = [str(ic) for ic in coordvecs]

        self.Param_List = paramlist
        self.Time_Vector = times
        self.Coord_Vecs = coordvecs
        self.Sensor_loc = sensor_loc
        self.Species = species
        (Nloc, Nt) = paramlist.shape[:2]
        #set up a Velocity measurement
        if velocity is None:
            self.Velocity = sp.zeros((Nloc, Nt, 3))
        else:
            # if in spherical coordinates, project the radial velocity onto Cartesian components
            if velocity.ndim == 2 and ver == 1:
                veltup = (velocity * sp.tile(xvecmult[:, sp.newaxis], (1, Nt)),
                          velocity * sp.tile(yvecmult[:, sp.newaxis], (1, Nt)),
                          velocity * sp.tile(zvecmult[:, sp.newaxis], (1, Nt)))
                self.Velocity = sp.dstack(veltup)
            else:
                self.Velocity = velocity
        # set up a params name
        if paramnames is None:
            partparam = paramlist.shape[2:]
            if species is not None:
                paramnames = [['Ni_' + isp, 'Ti_' + isp]
                              for isp in species[:-1]]
                paramnames.append(['Ne', 'Te'])
                self.Param_Names = sp.array(paramnames, dtype=str)
            else:

                paramnums = np.arange(np.prod(partparam))
                self.Param_Names = np.reshape(paramnums, partparam)
        else:
            self.Param_Names = paramnames
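A minimal usage sketch for this constructor (all values hypothetical; assumes the numpy import used by the class module):

import numpy as np

N, T, P = 100, 5, 2
coordlist = np.random.randn(N, 3) * 100.0   # Cartesian x, y, z locations
paramlist = np.random.rand(N, T, P)         # one value per location, time, parameter
times = np.arange(T)

# ver=0 marks coordlist as Cartesian; spherical coordinates are derived internally
iono = IonoContainer(coordlist, paramlist, times=times, ver=0)
print(iono.Sphere_Coords.shape)             # (N, 3): r, azimuth, elevation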
예제 #27
0
    [t for t in range(1,
                      sp.shape(Velocity1L_t_val)[1] - 1)], 1)
# dt_Arr_val=sp.repeat(d_test['arr_17'][:numSamples,sp.newaxis],T,axis=1)
E_i_val = d_test['arr_4'][:numSamples]
E_f_val = d_test['arr_5'][:numSamples]
p_x_i_val = d_test['arr_6'][:numSamples]
p_x_f_val = d_test['arr_7'][:numSamples]
p_y_i_val = d_test['arr_8'][:numSamples]
p_y_f_val = d_test['arr_9'][:numSamples]

#===============================================================================================================
#   Collect Input and Output for Network
#===============================================================================================================

# Dimensions of (samples, timesteps, features)
input_Arr = sp.dstack(
    (Velocity1L_firstLast, Velocity2L_firstLast, m1_Arr, m2_Arr))[:, 0, :]
target_Arr = sp.dstack(
    (Velocity1L_firstLast, Velocity2L_firstLast, m1_Arr, m2_Arr))[:, 1, :]
input_Arr_val = sp.dstack((Velocity1L_firstLast_val, Velocity2L_firstLast_val,
                           m1_Arr_val, m2_Arr_val))[:, 0, :]
target_Arr_val = sp.dstack((Velocity1L_firstLast_val, Velocity2L_firstLast_val,
                            m1_Arr_val, m2_Arr_val))[:, 1, :]

#===============================================================================================================
#   Network
#===============================================================================================================

# # Training existing model. Comment out if you do not wish to do this.
# # model = load_model("trainedModel_temp.hd5")
#
# model = Sequential()
예제 #28
0
def verify_all_events(ev,
                      strain_idx=None,
                      list_bam=None,
                      event_type=None,
                      options=None,
                      out_fn=None):

    ### set parameters if called by rproc
    if strain_idx is None:
        PAR = ev
        ev = PAR['ev']
        strain_idx = PAR['strain_idx']
        list_bam = PAR['list_bam']
        if 'out_fn' in PAR:
            out_fn = PAR['out_fn']
        event_type = PAR['event_type']
        options = PAR['options']

    ### verify the events if demanded
    if options.verify_alt_events:

        prune_tag = ''
        if options.do_prune:
            prune_tag = '_pruned'
        validate_tag = ''
        if options.validate_sg:
            validate_tag = '.validated'

        if options.merge == 'single':
            (genes, inserted) = pickle.load(
                open(
                    '%s/spladder/genes_graph_conf%i.%s%s%s.pickle' %
                    (options.outdir, options.confidence,
                     options.samples[strain_idx], validate_tag, prune_tag),
                    'rb'))
            fn_count = '%s/spladder/genes_graph_conf%i.%s%s%s.count.hdf5' % (
                options.outdir, options.confidence,
                options.samples[strain_idx], validate_tag, prune_tag)
        else:
            (genes, inserted) = pickle.load(
                open(
                    '%s/spladder/genes_graph_conf%i.%s%s%s.pickle' %
                    (options.outdir, options.confidence, options.merge,
                     validate_tag, prune_tag), 'rb'))
            fn_count = '%s/spladder/genes_graph_conf%i.%s%s%s.count.hdf5' % (
                options.outdir, options.confidence, options.merge,
                validate_tag, prune_tag)

        ### load count index data from hdf5
        IN = h5py.File(fn_count, 'r')
        if os.path.exists(fn_count + '.quick_ids_segs'):
            gene_ids_segs = pickle.load(
                open(fn_count + '.quick_ids_segs', 'rb'))
        else:
            gene_ids_segs = IN['gene_ids_segs'][:]
            pickle.dump(gene_ids_segs, open(fn_count + '.quick_ids_segs',
                                            'wb'), -1)
        if os.path.exists(fn_count + '.quick_ids_edges'):
            gene_ids_edges = pickle.load(
                open(fn_count + '.quick_ids_edges', 'rb'))
        else:
            gene_ids_edges = IN['gene_ids_edges'][:]
            pickle.dump(gene_ids_edges,
                        open(fn_count + '.quick_ids_edges', 'wb'), -1)
        if os.path.exists(fn_count + '.quick_edge_idx'):
            edge_idx = pickle.load(open(fn_count + '.quick_edge_idx', 'rb'))
        else:
            edge_idx = IN['edge_idx'][:]
            pickle.dump(edge_idx, open(fn_count + '.quick_edge_idx', 'wb'), -1)

        ### sort events by gene idx
        s_idx = sp.argsort([x.gene_idx for x in ev])
        ev = ev[s_idx]
        old_idx = sp.argsort(s_idx)

        counts = []
        for i in range(ev.shape[0]):

            sys.stdout.write('.')
            if i > 0 and i % 50 == 0:
                sys.stdout.write('%i (%i)\n' % (i, ev.shape[0]))
            sys.stdout.flush()

            g_idx = ev[i].gene_idx
            ev[i].verified = []  ### TODO: maybe solve that differently

            ### there are no edges present in the event
            if gene_ids_edges.shape[0] == 0:
                ver, info = verify_empty(event_type)
                counts.append(sp.array([info]))
                ev[i].verified = sp.array(ev[i].verified, dtype='bool')
                continue

            gr_idx_segs = sp.where(gene_ids_segs == g_idx)[0]
            gr_idx_edges = sp.where(gene_ids_edges == g_idx)[0]
            if gr_idx_edges.shape[0] == 0:
                ver, info = verify_empty(event_type)
                counts.append(sp.array([info]))
                ev[i].verified = sp.array(ev[i].verified, dtype='bool')
                continue

            if isinstance(strain_idx, int):
                strain_idx = [strain_idx]

            ### load relevant count data from HDF5
            segments = sp.atleast_2d(
                IN['segments'][gr_idx_segs, :])[:, strain_idx]
            seg_pos = sp.atleast_2d(IN['seg_pos'][gr_idx_segs, :])[:,
                                                                   strain_idx]
            edges = sp.atleast_2d(IN['edges'][gr_idx_edges, :])[:, strain_idx]
            curr_edge_idx = edge_idx[gr_idx_edges]

            for s_idx in range(len(strain_idx)):
                #if s_idx > 0 and s_idx % 50 == 0:
                #    sys.stdout.write('%i (%i)\n' % (s_idx, len(strain_idx)))
                # ev_tmp.subset_strain(s_idx) ### TODO
                #sys.stdout.flush()
                if event_type == 'exon_skip':
                    ver, info = verify_exon_skip(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[curr_edge_idx, edges[:, s_idx]], options)
                elif event_type in ['alt_3prime', 'alt_5prime']:
                    ver, info = verify_alt_prime(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[curr_edge_idx, edges[:, s_idx]], options)
                elif event_type == 'intron_retention':
                    ver, info = verify_intron_retention(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[curr_edge_idx,
                              edges[:, s_idx]], seg_pos[:, s_idx].T, options)
                elif event_type == 'mult_exon_skip':
                    ver, info = verify_mult_exon_skip(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[curr_edge_idx, edges[:, s_idx]], options)
                elif event_type == 'mutex_exons':
                    ver, info = verify_mutex_exons(
                        ev[i], genes[g_idx], segments[:, s_idx].T,
                        sp.c_[curr_edge_idx, edges[:, s_idx]], options)

                ev[i].verified.append(ver)
                if s_idx == 0:
                    counts.append(sp.array([info], dtype='float'))
                else:
                    counts[-1] = sp.r_[counts[-1],
                                       sp.array([info], dtype='float')]
            ev[i].verified = sp.array(ev[i].verified, dtype='bool')

        IN.close()
        counts = sp.dstack(counts)
    ev = ev[old_idx]
    counts = counts[:, :, old_idx]

    if out_fn is not None:
        pickle.dump((ev, counts), open(out_fn, 'wb'), -1)

    return (ev, counts)
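As the top of the function shows, verify_all_events also accepts a single parameter dict when dispatched through rproc; a sketch of that calling convention (all values hypothetical):

PAR = {'ev': ev,                            # array of event objects
       'strain_idx': [0, 1],                # samples to verify
       'list_bam': list_bam,                # alignment files
       'event_type': 'exon_skip',
       'options': options,
       'out_fn': 'verified_events.pickle'}  # optional; result is also pickled here
ev, counts = verify_all_events(PAR)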
예제 #29
0
def distance(a, b):
    """Return the Euclidean distance from point a to point b.

    Takes any length-3 iterable representing X, Y, Z in Cartesian
    coordinates; arrays, lists, and tuples all work.
    """
    subtrArray = sp.dstack([a,-b]).sum(2)
    return sp.sqrt(sp.sum([n**2 for n in subtrArray]))
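A quick sanity check of this helper (a sketch written with plain numpy, since recent SciPy releases removed the top-level dstack/sqrt aliases used above; note the asarray calls that let plain tuples be negated):

import numpy as np

def distance_np(a, b):
    # same computation as above: stack a and -b depth-wise, sum the two
    # layers to get a - b, then take the Euclidean norm
    diff = np.dstack([np.asarray(a), -np.asarray(b)]).sum(2)
    return np.sqrt(np.sum(diff**2))

print(distance_np((0, 0, 0), (1, 2, 2)))    # 3.0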
예제 #30
0
파일: quantify.py 프로젝트: bowhan/spladder
def quantify_from_graph(ev, strain_idx=None, event_type=None, CFG=None, out_fn=None, fn_merge=None):
    # cov = quantify_from_graph(ev, strain_idx, event_type, CFG, out_fn)

    ### set parameters if called by rproc
    if strain_idx is None:
        PAR = ev
        ev = PAR["ev"]
        strain_idx = PAR["strain_idx"]
        if "out_fn" in PAR:
            out_fn = PAR["out_fn"]
        event_type = PAR["event_type"]
        CFG = PAR["CFG"]

    if fn_merge is None:
        fn_merge = get_filename("fn_out_merge_val", CFG)

    if CFG["is_matlab"]:
        genes = scio.loadmat(fn_merge, struct_as_record=False)["genes"][0, :]
        fn_count = fn_merge.replace("mat", "count.mat")
    else:
        genes = cPickle.load(open(fn_merge, "r"))[0]
        fn_count = fn_merge.replace("pickle", "count.pickle")

    ### load count index data from hdf5
    IN = h5py.File(fn_count, "r")
    gene_ids_segs = IN["gene_ids_segs"][:].astype("int")
    gene_ids_edges = IN["gene_ids_edges"][:].astype("int")
    if len(gene_ids_segs.shape) > 1:
        gene_ids_segs = gene_ids_segs[0, :]
    if len(gene_ids_edges.shape) > 1:
        gene_ids_edges = gene_ids_edges[0, :]

    ### sort events by gene idx
    s_idx = sp.argsort([x.gene_idx for x in ev])
    ev = ev[s_idx]
    old_idx = sp.argsort(s_idx)

    ### find gene idx boundaries
    assert isequal(gene_ids_segs, sp.sort(gene_ids_segs))
    assert isequal(gene_ids_edges, sp.sort(gene_ids_edges))

    tmp, genes_f_idx_segs = sp.unique(gene_ids_segs, return_index=True)
    genes_l_idx_segs = sp.r_[genes_f_idx_segs[1:] - 1, gene_ids_segs.shape[0]]

    tmp, genes_f_idx_edges = sp.unique(gene_ids_edges, return_index=True)
    genes_l_idx_edges = sp.r_[genes_f_idx_edges[1:] - 1, gene_ids_edges.shape[0]]

    gr_idx_segs = 0
    gr_idx_edges = 0
    counts = []
    for i in range(ev.shape[0]):
        sys.stdout.write(".")
        if i % 10 == 0:
            sys.stdout.write("%i\n" % i)
        sys.stdout.flush()
        if CFG["is_matlab"]:
            offset = 1
        else:
            offset = 0
        g_idx = ev[i].gene_idx

        while gene_ids_segs[genes_f_idx_segs[gr_idx_segs]] < g_idx:
            gr_idx_segs += 1
        assert gene_ids_segs[genes_f_idx_segs[gr_idx_segs]] == g_idx

        while gene_ids_edges[genes_f_idx_edges[gr_idx_edges]] < g_idx:
            gr_idx_edges += 1
        assert gene_ids_edges[genes_f_idx_edges[gr_idx_edges]] == g_idx

        ### load relevant count data from HDF5
        if CFG["is_matlab"]:
            segments = IN["segments"][strain_idx, genes_f_idx_segs[gr_idx_segs] : genes_l_idx_segs[gr_idx_segs] + 1].T
            seg_pos = IN["seg_pos"][
                strain_idx, genes_f_idx_segs[gr_idx_segs] : genes_l_idx_segs[gr_idx_segs] + 1
            ].astype("int")
            edges = IN["edges"][strain_idx, genes_f_idx_edges[gr_idx_edges] : genes_l_idx_edges[gr_idx_edges] + 1].T
            edge_idx = IN["edge_idx"][0, genes_f_idx_edges[gr_idx_edges] : genes_l_idx_edges[gr_idx_edges] + 1].astype(
                "int"
            )
        else:
            segments = IN["segments"][genes_f_idx_segs[gr_idx_segs] : genes_l_idx_segs[gr_idx_segs] + 1, strain_idx]
            seg_pos = IN["seg_pos"][genes_f_idx_segs[gr_idx_segs] : genes_l_idx_segs[gr_idx_segs] + 1, strain_idx]
            edges = IN["edges"][genes_f_idx_edges[gr_idx_edges] : genes_l_idx_edges[gr_idx_edges] + 1, strain_idx]
            edge_idx = IN["edge_idx"][genes_f_idx_edges[gr_idx_edges] : genes_l_idx_edges[gr_idx_edges] + 1]

        for s_idx in range(len(strain_idx)):
            # print '%i/%i' % (s_idx, len(strain_idx))

            if event_type == "exon_skip":
                cov = quantify_exon_skip(
                    ev[i], genes[g_idx - offset], segments[:, s_idx].T, sp.c_[edge_idx, edges[:, s_idx]], CFG
                )
            elif event_type in ["alt_3prime", "alt_5prime"]:
                cov = quantify_alt_prime(
                    ev[i], genes[g_idx - offset], segments[:, s_idx].T, sp.c_[edge_idx, edges[:, s_idx]], CFG
                )
            elif event_type == "intron_retention":
                cov = quantify_intron_retention(
                    ev[i],
                    genes[g_idx - offset],
                    segments[:, s_idx].T,
                    sp.c_[edge_idx, edges[:, s_idx]],
                    seg_pos[:, s_idx].T,
                    CFG,
                )
            elif event_type == "mult_exon_skip":
                cov = quantify_mult_exon_skip(
                    ev[i], genes[g_idx - offset], segments[:, s_idx].T, sp.c_[edge_idx, edges[:, s_idx]], CFG
                )
            elif event_type == "mutex_exons":
                cov = quantify_mutex_exons(
                    ev[i], genes[g_idx - offset], segments[:, s_idx].T, sp.c_[edge_idx, edges[:, s_idx]], CFG
                )

            if s_idx == 0:
                counts.append(sp.array([cov]))
            else:
                counts[-1] = sp.r_[counts[-1], sp.array([cov])]
    IN.close()
    counts = sp.dstack(counts)

    ### re-sort by old idx
    ev = ev[old_idx]
    counts = counts[:, :, old_idx]

    if out_fn is not None:
        cPickle.dump((ev, counts), open(out_fn, "w"))

    return (ev, counts)
예제 #31
0
 def length(self):
     """Return the length of the edge."""
     subtrArray = sp.dstack([self.a,-self.b]).sum(2)
     return sp.sqrt(sp.sum([n**2 for n in subtrArray]))
예제 #32
0
    def plotScatter(self,
                    file_name='scatter_ts',
                    out_dir='./cache',
                    plot_test=True,
                    xaxis=0,
                    yaxis=2,
                    xlab='G1 score',
                    ylab='G2M score',
                    class_labels=['G1 phase', 'S phase', 'G2M phase'],
                    method='RF',
                    decision_lines=True):
        plparams = {
            'backend': 'pdf',
            'axes.labelsize': 14,
            'text.fontsize': 14,
            'legend.fontsize': 13,
            'xtick.labelsize': 14,
            'ytick.labelsize': 14,
            'text.usetex': False
        }
        PL.rcParams.update(plparams)
        assert self.scores is not None, 'cyclone: first train the model before attempting to plot'
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        if plot_test:
            file_name = file_name + method + '.pdf'
        else:
            file_name = file_name + method + '_cv.pdf'
        if plot_test:
            if method == 'RF':
                labs = self.labels_tst
                scores = self.scores_tst
            elif method == 'LR':
                labs = self.labels_tst
                scores = self.scoresLR_tst
            elif method == 'LRall':
                labs = self.labels_tst
                scores = self.scoresLRall_tst
            elif method == 'GNB':
                labs = self.labels_tst
                scores = self.scoresGNB_tst
            elif method == 'SVM':
                labs = self.labels_tst
                scores = self.scoresSVM_tst
            elif method == 'SVMrbf':
                labs = self.labels_tst
                scores = self.scoresSVMrbf_tst
        else:
            if method == 'RF':
                labs = self.labels
                scores = self.scores
            elif method == 'LR':
                labs = self.labels
                scores = self.scoresLR
            elif method == 'LRall':
                labs = self.labels
                scores = self.scoresLRall
            elif method == 'GNB':
                labs = self.labels
                scores = self.scoresGNB
            elif method == 'SVM':
                labs = self.labels
                scores = self.scoresSVM
            elif method == 'SVMrbf':
                labs = self.labels
                scores = self.scoresSVMrbf
        cols = [
            'r', 'b', 'g', 'y', 'Crimson', 'DeepPink', 'LightSalmon', 'Lime',
            'Olive'
        ]
        cols_d = {}
        cols_d['G1'] = '#1b9e77'
        cols_d['S'] = '#d95f02'
        cols_d['G2M'] = '#7570b3'
        cols_d['mid'] = '#e7298a'
        cols_d['early'] = '#e6ab02'
        cols_d['late'] = '#66a61e'

        labs = labs.astype('int')
        lab_col = list()
        fig = PL.figure(figsize=(6, 6))
        ax = fig.add_subplot(111)
        #ax=PL.axes([0.25,0.25,0.65,0.65])
        ax.set_position([0.1, 0.1, 0.7, 0.7])
        hList = list()
        for iplot in range(len(labs)):
            #hList.append(PL.plot(scores[iplot,xaxis],scores[iplot,yaxis],'.',markersize=15,c=cols[labs[iplot]-1], alpha=0.75))
            if class_labels[labs[iplot] - 1] in cols_d.keys():
                hList.append(
                    PL.plot(scores[iplot, xaxis],
                            scores[iplot, yaxis],
                            '.',
                            markersize=15,
                            c=cols_d[class_labels[labs[iplot] - 1]],
                            alpha=0.75))
            else:
                hList.append(
                    PL.plot(scores[iplot, xaxis],
                            scores[iplot, yaxis],
                            '.',
                            markersize=15,
                            c='#8c510a',
                            alpha=0.75))
        PL.xlabel(xlab)
        PL.ylabel(ylab)
        if xlab == 'G1 score':
            PL.xlim(xmin=0.0, xmax=.95)
            x_max = 0.75
        else:
            x_max = scores[:, xaxis].max()  #+ 0.05
            PL.xlim(xmax=x_max + 0.05)
        if ylab == 'G2M score':
            PL.ylim(ymin=0.0, ymax=.95)
            y_max = 0.75
        else:
            y_max = scores[:, yaxis].max()  #+ 0.05
            PL.ylim(ymax=y_max + 0.05)

        if decision_lines == True:
            x_min = 0.0
            y_min = 0.0
            h = 0.001
            xx, yy = SP.meshgrid(SP.arange(x_min, x_max, h),
                                 SP.arange(y_min, y_max, h))
            zz = 1 - (xx + yy)
            Z = SP.argmax(SP.dstack((xx, yy, zz)), 2)
            PL.contour(xx, yy, Z, levels=[0, 1])

        legH = list()
        u_classes = SP.unique(labs)
        for ileg in u_classes:
            legH.append(hList[SP.where(labs == ileg)[0][0]][0])
        lh = PL.legend(legH,
                       class_labels,
                       loc='upper center',
                       bbox_to_anchor=(0.5, 1.15),
                       ncol=3,
                       numpoints=1,
                       scatterpoints=1)
        lh.set_frame_on(False)
        ax.spines["right"].set_visible(False)
        ax.spines["top"].set_visible(False)
        ax.get_xaxis().tick_bottom()
        PL.savefig(out_dir + '/' + file_name,
                   bbox_extra_artists=[lh])  #,bbox_inches='tight')
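The decision_lines branch above draws class boundaries by scoring a dense grid and taking the argmax over a depth-stacked score cube; a self-contained sketch of the same trick with toy scores:

import numpy as np
import matplotlib.pyplot as plt

h = 0.001
xx, yy = np.meshgrid(np.arange(0.0, 0.75, h), np.arange(0.0, 0.75, h))
zz = 1 - (xx + yy)                          # third class score is the remainder
Z = np.argmax(np.dstack((xx, yy, zz)), 2)   # winning class at each grid point
plt.contour(xx, yy, Z, levels=[0, 1])       # boundaries between classes 0/1 and 1/2
plt.show()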
예제 #33
0
def vl_phow(im,
            verbose=True,
            fast=True,
            sizes=[4, 6, 8, 10],
            step=2,
            color='rgb',
            floatdescriptors=False,
            magnif=6,
            windowsize=1.5,
            contrastthreshold=0.005):

    opts = Options(verbose, fast, sizes, step, color, floatdescriptors, magnif,
                   windowsize, contrastthreshold)
    dsiftOpts = DSiftOptions(opts)

    # make sure image is float, otherwise segfault
    im = array(im, 'float32')

    # Extract the features
    imageSize = shape(im)
    if im.ndim == 3:
        if imageSize[2] != 3:
            # "IndexError: tuple index out of range" if both if's are checked at the same time
            raise ValueError("Image data in unknown format/shape")
    if opts.color == 'gray':
        numChannels = 1
        if (im.ndim == 2):
            im = vl_rgb2gray(im)
    else:
        numChannels = 3
        if (im.ndim == 2):
            im = dstack([im, im, im])
        if opts.color == 'rgb':
            pass
        elif opts.color == 'opponent':
            # from https://github.com/vlfeat/vlfeat/blob/master/toolbox/sift/vl_phow.m
            # Note that the mean differs from the standard definition of opponent
            # space and is the regular intensity (for compatibility with
            # the contrast thresholding).
            # Note also that the mean is added back to the other two
            # components with a small multiplier for monochromatic
            # regions.

            mu = 0.3 * im[:, :, 0] + 0.59 * im[:, :, 1] + 0.11 * im[:, :, 2]
            alpha = 0.01
            im = dstack([
                mu, (im[:, :, 0] - im[:, :, 1]) / sqrt(2) + alpha * mu,
                (im[:, :, 0] + im[:, :, 1] - 2 * im[:, :, 2]) / sqrt(6) +
                alpha * mu
            ])
        else:
            raise ValueError('Color option ' + str(opts.color) +
                             ' not recognized')
    if opts.verbose:
        print('{0}: color space: {1}'.format('vl_phow', opts.color))
        print('{0}: image size: {1} x {2}'.format('vl_phow', imageSize[0],
                                                  imageSize[1]))
        print('{0}: sizes: [{1}]'.format('vl_phow', opts.sizes))

    frames_all = []
    descrs_all = []
    for size_of_spatial_bins in opts.sizes:
        # from https://github.com/vlfeat/vlfeat/blob/master/toolbox/sift/vl_phow.m
        # Recall from VL_DSIFT() that the first descriptor for scale SIZE has
        # center located at XC = XMIN + 3/2 SIZE (the Y coordinate is
        # similar). It is convenient to align the descriptors at different
        # scales so that they have the same geometric centers. For the
        # maximum size we pick XMIN = 1 and we get centers starting from
        # XC = 1 + 3/2 MAX(OPTS.SIZES). For any other scale we pick XMIN so
        # that XMIN + 3/2 SIZE = 1 + 3/2 MAX(OPTS.SIZES).
        # In practice, the offset must be integer ('bounds'), so the
        # alignment works properly only if all OPTS.SIZES are even or odd.

        off = floor(3.0 / 2 * (max(opts.sizes) - size_of_spatial_bins)) + 1

        # smooth the image to the appropriate scale based on the size
        # of the SIFT bins
        sigma = size_of_spatial_bins / float(opts.magnif)
        ims = vl_imsmooth(im, sigma)

        # extract dense SIFT features from all channels
        frames = []
        descrs = []
        for k in range(numChannels):
            size_of_spatial_bins = int(size_of_spatial_bins)
            # vl_dsift does not accept numpy.int64 or similar
            f_temp, d_temp = vl_dsift(data=ims[:, :, k],
                                      step=dsiftOpts.step,
                                      size=size_of_spatial_bins,
                                      fast=dsiftOpts.fast,
                                      verbose=dsiftOpts.verbose,
                                      norm=dsiftOpts.norm,
                                      bounds=[off, off, maxint, maxint])
            frames.append(f_temp)
            descrs.append(d_temp)
        frames = array(frames)
        descrs = array(descrs)
        d_new_shape = [descrs.shape[0] * descrs.shape[1], descrs.shape[2]]
        descrs = descrs.reshape(d_new_shape)
        # remove low contrast descriptors
        # note that for color descriptors the V component is
        # thresholded
        if (opts.color == 'gray') | (opts.color == 'opponent'):
            contrast = frames[0][2, :]
        elif opts.color == 'rgb':
            contrast = mean(
                [frames[0][2, :], frames[1][2, :], frames[2][2, :]], 0)
        else:
            raise ValueError('Color option ' + str(opts.color) +
                             ' not recognized')
        descrs[:, contrast < opts.contrastthreshold] = 0

        # save only x,y, and the scale
        frames_temp = array(frames[0][0:3, :])
        padding = array(size_of_spatial_bins * ones(frames[0][0].shape))
        frames_all.append(vstack([frames_temp, padding]))
        descrs_all.append(array(descrs))

    frames_all = hstack(frames_all)
    descrs_all = hstack(descrs_all)
    return frames_all, descrs_all
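A hedged usage sketch for this port (the image array is hypothetical; vl_imsmooth, vl_dsift, and vl_rgb2gray must be importable from the same package):

import numpy as np

img = np.random.rand(128, 128, 3).astype('float32')  # stand-in RGB image
frames, descrs = vl_phow(img, sizes=[4, 6, 8, 10], step=2, color='rgb')
# frames carries x, y, a contrast row, and the spatial bin size per keypoint
print(frames.shape, descrs.shape)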
예제 #34
0
 def __init__(self, series1, series2, wave1, wave2, *args, **kwargs):
     series = sp.vstack((series1.ravel(), series2.ravel())).T
     wave = sp.dstack((wave1, wave2))
     WaveletTransform.__init__(self, series, wave, *args, **kwargs)
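A shape sketch of what the dstack in this constructor produces (hypothetical wavelet arrays):

import numpy as np

wave1 = np.zeros((64, 200))                 # stand-in wavelet power: scales x times
wave2 = np.ones((64, 200))
wave = np.dstack((wave1, wave2))            # shape (64, 200, 2): one layer per series
print(wave.shape)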
예제 #35
0
파일: cyclone.py 프로젝트: PMBio/cyclone
	def plotScatter(self, file_name = 'scatter_ts',out_dir = './cache', plot_test = True, xaxis = 0, yaxis = 2, xlab = 'G1 score', ylab = 'G2M score', class_labels = ['G1 phase','S phase','G2M phase'], method = 'RF', decision_lines=True):	
		plparams = {'backend': 'pdf',
		  'axes.labelsize': 14,
		  'text.fontsize': 14,
		  'legend.fontsize': 13,
		  'xtick.labelsize': 14,
		  'ytick.labelsize': 14,
		  'text.usetex': False}
		PL.rcParams.update(plparams)
		assert self.scores is not None, 'cyclone: first train the model before attempting to plot'
		if not os.path.exists(out_dir):
			os.makedirs(out_dir)
		if plot_test:
			file_name = file_name+method+'.pdf'
		else:
			file_name = file_name+method+'_cv.pdf'
		if plot_test:
			if method=='RF':
				labs = self.labels_tst
				scores = self.scores_tst
			elif method=='LR':
				labs = self.labels_tst
				scores = self.scoresLR_tst
			elif method=='LRall':
				labs = self.labels_tst
				scores = self.scoresLRall_tst
			elif method=='GNB':
				labs = self.labels_tst
				scores = self.scoresGNB_tst
			elif method=='SVM':
				labs = self.labels_tst
				scores = self.scoresSVM_tst
			elif method=='SVMrbf':
				labs = self.labels_tst
				scores = self.scoresSVMrbf_tst
		else:	
			if method=='RF':
				labs = self.labels
				scores = self.scores
			elif method=='LR':
				labs = self.labels
				scores = self.scoresLR
			elif method=='LRall':
				labs = self.labels
				scores = self.scoresLRall
			elif method=='GNB':
				labs = self.labels
				scores = self.scoresGNB
			elif method=='SVM':
				labs = self.labels
				scores = self.scoresSVM
			elif method=='SVMrbf':
				labs = self.labels
				scores = self.scoresSVMrbf
		cols = ['r', 'b', 'g', 'y', 'Crimson', 'DeepPink','LightSalmon','Lime', 'Olive']
		cols_d = {}
		cols_d['G1'] = '#1b9e77'
		cols_d['S'] = '#d95f02'
		cols_d['G2M'] = '#7570b3'
		cols_d['mid'] = '#e7298a'
		cols_d['early'] = '#e6ab02'
		cols_d['late'] = '#66a61e'

		labs = labs.astype('int')
		lab_col = list()
		fig = PL.figure(figsize=(6,6))
		ax = fig.add_subplot(111)
		#ax=PL.axes([0.25,0.25,0.65,0.65])
		ax.set_position([0.1,0.1,0.7,0.7])
		hList =list()
		for iplot in range(len(labs)):
			#hList.append(PL.plot(scores[iplot,xaxis],scores[iplot,yaxis],'.',markersize=15,c=cols[labs[iplot]-1], alpha=0.75))
			if class_labels[labs[iplot]-1] in cols_d.keys():
				hList.append(PL.plot(scores[iplot,xaxis],scores[iplot,yaxis],'.',markersize=15,c=cols_d[class_labels[labs[iplot]-1]], alpha=0.75))
			else:
				hList.append(PL.plot(scores[iplot,xaxis],scores[iplot,yaxis],'.',markersize=15,c='#8c510a', alpha=0.75))
		PL.xlabel(xlab)
		PL.ylabel(ylab)
		if xlab == 'G1 score':
			PL.xlim(xmin = 0.0, xmax = .95)
			x_max = 0.75
		else:
			x_max = scores[:,xaxis].max() #+ 0.05
			PL.xlim(xmax = x_max+0.05)
		if ylab == 'G2M score':
			PL.ylim(ymin = 0.0, ymax = .95)
			y_max = 0.75
		else:
			y_max = scores[:,yaxis].max() #+ 0.05
			PL.ylim(ymax = y_max+0.05)

		if decision_lines ==True:
			x_min = 0.0
			y_min = 0.0
			h = 0.001
			xx, yy = SP.meshgrid(SP.arange(x_min, x_max, h),
							 SP.arange(y_min, y_max, h))
			zz = 1 - (xx + yy)
			Z = SP.argmax(SP.dstack((xx,yy,zz)), 2)
			PL.contour(xx, yy, Z, levels = [0,1])

		legH=list()
		u_classes = SP.unique(labs)
		for ileg in u_classes:
			legH.append(hList[SP.where(labs==ileg)[0][0]][0])
		lh=PL.legend(legH,class_labels,loc='upper center',bbox_to_anchor=(0.5, 1.15),ncol=3, numpoints=1,scatterpoints=1)
		lh.set_frame_on(False)
		ax.spines["right"].set_visible(False)
		ax.spines["top"].set_visible(False)
		ax.get_xaxis().tick_bottom()
		PL.savefig(out_dir+'/'+file_name,bbox_extra_artists=[lh])#,bbox_inches='tight')
예제 #36
0
    Position2L_t_val, [t for t in range(2,
                                        sp.shape(Velocity1L_t_val)[1])], 1)
Velocity1L_firstSecond_val = sp.delete(
    Velocity1L_t_val, [t for t in range(2,
                                        sp.shape(Velocity1L_t_val)[1])], 1)
Velocity2L_firstSecond_val = sp.delete(
    Velocity2L_t_val, [t for t in range(2,
                                        sp.shape(Velocity1L_t_val)[1])], 1)

dt_Arr_val = sp.delete(
    sp.repeat(d_test['arr_17'][:numSamples, sp.newaxis],
              sp.shape(Velocity1L_t_val)[1],
              axis=1), [t for t in range(2,
                                         sp.shape(Velocity1L_t_val)[1])], 1)

initial_state = sp.dstack(
    (Position1L_firstSecond_val, Velocity1L_firstSecond_val))[:, 0, :]

plotThisX = [initial_state[0, 0]]
plotThisY = [initial_state[0, 1]]
statesCovered = [initial_state]
for i in range(100):
    predictFromThis = sp.array(
        [sp.append(statesCovered[i], dt_Arr_val[0, 0]) for j in range(1)])
    prediction = model.predict(predictFromThis)
    tempXArray = prediction[0, 0]
    tempYArray = prediction[0, 1]
    plotThisX.append(tempXArray)
    plotThisY.append(tempYArray)
    statesCovered.append(prediction)

plt.figure()
예제 #37
0
def armorf(x, ntrls, npts, p):
    from scipy import shape, array, matrix, zeros, disp, concatenate, eye, dstack
    from numpy import linalg  # for inverse and Cholesky factorization;
    import numpy as np
    inv = linalg.inv
    # Make name consistent with Matlab

    # Initialization
    x = matrix(x)
    [L, N] = shape(x)
    # L is the number of channels, N is the npts*ntrls
    R0 = R0f = R0b = pf = pb = pfb = ap = bp = En = matrix(zeros((L, L)))
    # covariance matrix at lag 0 (matrix() requires 2-D input, so no singleton third axis)

    # calculate the covariance matrix?
    for i in range(ntrls):
        En = En + x[:, i * npts:(i + 1) * npts] * x[:, i * npts:
                                                    (i + 1) * npts].H
        ap = ap + x[:, i * npts + 1:(i + 1) * npts] * x[:, i * npts + 1:
                                                        (i + 1) * npts].H
        bp = bp + x[:, i * npts:(i + 1) * npts - 1] * x[:, i * npts:
                                                        (i + 1) * npts - 1].H

    ap = inv((ckchol(ap / ntrls * (npts - 1)).T).H)
    bp = inv((ckchol(bp / ntrls * (npts - 1)).T).H)

    for i in range(ntrls):
        efp = ap * x[:, i * npts + 1:(i + 1) * npts]
        ebp = bp * x[:, i * npts:(i + 1) * npts - 1]
        pf = pf + efp * efp.H
        pb = pb + ebp * ebp.H
        pfb = pfb + efp * ebp.H

    En = (ckchol(En / N).T).H
    # Covariance of the noise

    # Initial output variables
    tmp = []
    for i in range(L):
        tmp.append(
            [])  # In Matlab, coeff=[], and anything can be appended to that.
    coeff = matrix(tmp)
    #  Coefficient matrices of the AR model
    kr = matrix(tmp)
    # reflection coefficients
    aparr = array(ap)  # Convert AP matrix to an array, so it can be dstacked
    bparr = array(bp)

    for m in range(p):
        # Calculate the next order reflection (parcor) coefficient
        ck = inv((ckchol(pf).T).H) * pfb * inv(ckchol(pb).T)
        kr = concatenate((kr, ck), 1)
        # Update the forward and backward prediction errors
        ef = eye(L) - ck * ck.H
        eb = eye(L) - ck.H * ck

        # Update the prediction error
        En = En * (ckchol(ef).T).H
        E = (ef + eb) / 2

        # Update the coefficients of the forward and backward prediction errors
        Z = zeros((L, L))  # Make it easier to define this
        aparr = dstack((aparr, Z))
        bparr = dstack((bparr, Z))
        pf = pb = pfb = Z
        # Do some variable juggling to handle Python's array/matrix limitations
        a = b = zeros((L, L, 0))

        for i in range(m + 2):
            tmpap1 = matrix(
                aparr[:, :, i]
            )  # Need to convert back to matrix to perform operations
            tmpbp1 = matrix(bparr[:, :, i])
            tmpap2 = matrix(aparr[:, :, m + 1 - i])
            tmpbp2 = matrix(bparr[:, :, m + 1 - i])
            tmpa = inv((ckchol(ef).T).H) * (tmpap1 - ck * tmpbp2)
            tmpb = inv((ckchol(eb).T).H) * (tmpbp1 - ck.H * tmpap2)
            a = dstack((a, array(tmpa)))
            b = dstack((b, array(tmpb)))

        for k in range(ntrls):
            efp = zeros((L, npts - m - 2))
            ebp = zeros((L, npts - m - 2))
            for i in range(m + 2):
                k1 = m + 2 - i + k * npts
                k2 = npts - i + k * npts
                efp = efp + matrix(a[:, :, i]) * matrix(x[:, k1:k2])
                ebp = ebp + matrix(b[:, :, m + 1 - i]) * matrix(
                    x[:, k1 - 1:k2 - 1])
            pf = pf + efp * efp.H
            pb = pb + ebp * ebp.H
            pfb = pfb + efp * ebp.H

        aparr = a
        bparr = b

    for j in range(p):
        coeff = concatenate(
            (coeff, inv(matrix(a[:, :, 0])) * matrix(a[:, :, j + 1])), 1)

    return coeff, En * En.H, kr
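A usage sketch for armorf (data are hypothetical; ckchol is the author's Cholesky helper and must be importable for the function to run):

import numpy as np

L, ntrls, npts, p = 2, 10, 500, 3           # channels, trials, points per trial, AR order
x = np.random.randn(L, ntrls * npts)        # trials concatenated along the time axis
coeff, noise_cov, kr = armorf(x, ntrls, npts, p)
# coeff holds the L x L AR coefficient matrices side by side; noise_cov is L x L
print(np.shape(coeff), np.shape(noise_cov))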
예제 #38
0
# m2_Arr_val= sp.delete( sp.repeat(d_test['arr_16'][:numSamples,sp.newaxis],sp.shape(Velocity1L_t)[1],axis=1),
#                        [t for t in range(1,sp.shape(Velocity1L_t_val)[1]-1)],1)
# dt_Arr_val=sp.repeat(d_test['arr_17'][:numSamples,sp.newaxis],T,axis=1)
E_i_val = d_test['arr_4'][:numSamples]
E_f_val = d_test['arr_5'][:numSamples]
p_x_i_val = d_test['arr_6'][:numSamples]
p_x_f_val = d_test['arr_7'][:numSamples]
p_y_i_val = d_test['arr_8'][:numSamples]
p_y_f_val = d_test['arr_9'][:numSamples]

#===============================================================================================================
#   Collect Input and Output for Network
#===============================================================================================================

# Dimensions of (samples, timesteps, features)
input_Arr = sp.dstack((Position1L_first, Position2L_first, Velocity1L_first,
                       Velocity2L_first, radius_Arr))[:, 0, :]
target_Arr = isCollision_Arr
input_Arr_val = sp.dstack(
    (Position1L_first_val, Position2L_first_val, Velocity1L_first_val,
     Velocity2L_first_val, radius_Arr_val))[:, 0, :]
target_Arr_val = isCollision_Arr_val

#===============================================================================================================
#   Network
#===============================================================================================================

# Training existing model. Comment out if you do not wish to do this.
model = load_model("trainedModel_temp.hd5")

model = Sequential()
model.add(
예제 #39
0
파일: verify.py 프로젝트: ccwang12/spladder
def verify_all_events(ev, strain_idx=None, list_bam=None, event_type=None, CFG=None, out_fn=None):
    # (ev, counts) = verify_all_events(ev, strain_idx, list_bam, event_type, CFG) ;

    ### set parameters if called by rproc
    if strain_idx is None:
        PAR = ev
        ev = PAR['ev']
        strain_idx = PAR['strain_idx']
        list_bam = PAR['list_bam']
        if 'out_fn' in PAR:
            out_fn = PAR['out_fn']
        event_type = PAR['event_type']
        CFG = PAR['CFG']

    ### verify the events if demanded
    if CFG['verify_alt_events']:

        prune_tag = ''
        if CFG['do_prune']:
            prune_tag = '_pruned'
        validate_tag = ''
        if CFG['validate_splicegraphs']:
            validate_tag = '.validated'

        (genes, inserted) = cPickle.load(open('%s/spladder/genes_graph_conf%i.%s%s%s.pickle' % (CFG['out_dirname'], CFG['confidence_level'], CFG['merge_strategy'], validate_tag, prune_tag)))

        fn_count = '%s/spladder/genes_graph_conf%i.%s%s%s.count.pickle' % (CFG['out_dirname'], CFG['confidence_level'], CFG['merge_strategy'], validate_tag, prune_tag)
        ### load count index data from hdf5
        IN = h5py.File(fn_count, 'r')
        gene_ids_segs = IN['gene_ids_segs'][:]
        gene_ids_edges = IN['gene_ids_edges'][:]

        ### sort events by gene idx
        s_idx = sp.argsort([x.gene_idx for x in ev])
        ev = ev[s_idx]
        old_idx = sp.argsort(s_idx)

        ### find gene idx boundaries
        assert(isequal(gene_ids_segs, sp.sort(gene_ids_segs)))
        assert(isequal(gene_ids_edges, sp.sort(gene_ids_edges)))

        tmp, genes_f_idx_segs = sp.unique(gene_ids_segs, return_index=True)
        genes_l_idx_segs = sp.r_[genes_f_idx_segs[1:] - 1, gene_ids_segs.shape[0]]

        tmp, genes_f_idx_edges = sp.unique(gene_ids_edges, return_index=True)
        genes_l_idx_edges = sp.r_[genes_f_idx_edges[1:] - 1, gene_ids_edges.shape[0]]

        gr_idx_segs = 0
        gr_idx_edges = 0
        counts = []
        for i in range(ev.shape[0]):
            g_idx = ev[i].gene_idx

            while gene_ids_segs[genes_f_idx_segs[gr_idx_segs]] < g_idx:
                gr_idx_segs += 1
            assert(gene_ids_segs[genes_f_idx_segs[gr_idx_segs]] == g_idx)

            while gene_ids_edges[genes_f_idx_edges[gr_idx_edges]] < g_idx:
                gr_idx_edges += 1
            assert(gene_ids_edges[genes_f_idx_edges[gr_idx_edges]] == g_idx)

            ### load relevant count data from HDF5
            segments = IN['segments'][genes_f_idx_segs[gr_idx_segs]:genes_l_idx_segs[gr_idx_segs]+1, strain_idx]
            seg_pos = IN['seg_pos'][genes_f_idx_segs[gr_idx_segs]:genes_l_idx_segs[gr_idx_segs]+1, strain_idx]
            edges = IN['edges'][genes_f_idx_edges[gr_idx_edges]:genes_l_idx_edges[gr_idx_edges]+1, strain_idx]
            edge_idx = IN['edge_idx'][genes_f_idx_edges[gr_idx_edges]:genes_l_idx_edges[gr_idx_edges]+1]

            for s_idx in range(len(strain_idx)):
                print '%i/%i\r' % (s_idx, len(strain_idx))
               # ev_tmp.subset_strain(s_idx) ### TODO 
                if event_type == 'exon_skip':
                    ver, info = verify_exon_skip(ev[i], genes[g_idx], segments[:, s_idx].T,  sp.c_[edge_idx, edges[:, s_idx]], CFG)
                elif event_type in ['alt_3prime', 'alt_5prime']:
                    ver, info = verify_alt_prime(ev[i], genes[g_idx], segments[:, s_idx].T,  sp.c_[edge_idx, edges[:, s_idx]], CFG)
                elif event_type == 'intron_retention':
                    ver, info = verify_intron_retention(ev[i], genes[g_idx], segments[:, s_idx].T,  sp.c_[edge_idx, edges[:, s_idx]], seg_pos[:, s_idx].T, CFG)
                elif event_type == 'mult_exon_skip':
                    ver, info = verify_mult_exon_skip(ev[i], genes[g_idx], segments[:, s_idx].T,  sp.c_[edge_idx, edges[:, s_idx]], CFG)
                elif event_type == 'mutex_exons':
                    ver, info = verify_mutex_exons(ev[i], genes[g_idx], segments[:, s_idx].T,  sp.c_[edge_idx, edges[:, s_idx]], CFG)

                ev[i].verified.append(ver)
                if s_idx == 0:
                    counts.append(sp.array([info]))
                else:
                    counts[-1] = sp.r_[counts[-1], sp.array([info])]
            ev[i].verified = sp.array(ev[i].verified)

        IN.close()
        counts = sp.dstack(counts)
    ev = ev[old_idx]
    counts = counts[:, :, old_idx]

    if out_fn is not None:
        cPickle.dump((ev, counts), open(out_fn, 'w'))

    return (ev, counts)
예제 #40
0
파일: __main__.py 프로젝트: jlettvin/rpna
 def project(self):
     self.copy(self.screen, dstack(self.scale * self.target))
     if kw.get('verbose', False):
         self.tell(self.screen)
         self.tell()
     ShowImage("RPN", self.paste)
예제 #41
0
def vl_phow(im,
            verbose=False,
            fast=True,
            sizes=[4, 6, 8, 10],
            step=2,
            color='rgb',
            floatdescriptors=False,
            magnif=6,
            windowsize=1.5,
            contrastthreshold=0.005):

    opts = Options(verbose, fast, sizes, step, color, floatdescriptors, magnif,
                   windowsize, contrastthreshold)
    dsiftOpts = DSiftOptions(opts)
    im = array(im, 'float32')
    imageSize = shape(im)
    if im.ndim == 3:
        if imageSize[2] != 3:
            raise ValueError("Image data in unknown format/shape")
    if opts.color == 'gray':
        numChannels = 1
        if (im.ndim == 2):
            im = vl_rgb2gray(im)
    else:
        numChannels = 3
        if (im.ndim == 2):
            im = dstack([im, im, im])
        if opts.color == 'rgb':
            pass
        elif opts.color == 'opponent':
            mu = 0.3 * im[:, :, 0] + 0.59 * im[:, :, 1] + 0.11 * im[:, :, 2]
            alpha = 0.01
            im = dstack([
                mu, (im[:, :, 0] - im[:, :, 1]) / sqrt(2) + alpha * mu,
                (im[:, :, 0] + im[:, :, 1] - 2 * im[:, :, 2]) / sqrt(6) +
                alpha * mu
            ])
        else:
            raise ValueError('Color option ' + str(opts.color) +
                             ' not recognized')
    if opts.verbose:
        print('{0}: color space: {1}'.format('vl_phow', opts.color))
        print('{0}: image size: {1} x {2}'.format('vl_phow', imageSize[0],
                                                  imageSize[1]))
        print('{0}: sizes: [{1}]'.format('vl_phow', opts.sizes))

    frames_all = []
    descrs_all = []
    for size_of_spatial_bins in opts.sizes:
        off = floor(3.0 / 2 * (max(opts.sizes) - size_of_spatial_bins)) + 1
        sigma = size_of_spatial_bins / float(opts.magnif)
        ims = vl_imsmooth(im, sigma)
        frames = []
        descrs = []
        for k in range(numChannels):
            size_of_spatial_bins = int(size_of_spatial_bins)
            f_temp, d_temp = vl_dsift(
                image=ims[:, :, k],
                step=dsiftOpts.step,
                size=size_of_spatial_bins,
                fast=dsiftOpts.fast,
                verbose=dsiftOpts.verbose,
                norm=dsiftOpts.norm,
            )
            frames.append(f_temp.T)
            descrs.append(d_temp.T)
        frames = array(frames)
        descrs = array(descrs)
        d_new_shape = [descrs.shape[0] * descrs.shape[1], descrs.shape[2]]
        descrs = descrs.reshape(d_new_shape)
        if (opts.color == 'gray') | (opts.color == 'opponent'):
            contrast = frames[0][2, :]
        elif opts.color == 'rgb':
            contrast = mean(
                [frames[0][2, :], frames[1][2, :], frames[2][2, :]], 0)
        else:
            raise ValueError('Color option ' + str(opts.color) +
                             ' not recognized')
        descrs = descrs[:, contrast > opts.contrastthreshold]
        frames = frames[0][:, contrast > opts.contrastthreshold]
        frames_temp = array(frames[0:3, :])
        padding = array(size_of_spatial_bins * ones(frames[0].shape))
        frames_to_add = vstack([frames_temp, padding])

        frames_all.append(vstack([frames_temp, padding]))
        descrs_all.append(array(descrs))

    frames_all = hstack(frames_all)
    descrs_all = hstack(descrs_all)
    return frames_all.T[:, :2], descrs_all.T
예제 #42
0
damaged_pixels = sp.where(mask == 1)
'''
Spline interpolation is performed on each layer of the image, and the layers
are then stacked to visualize the "total" spline.
'''

splR = inter.SmoothBivariateSpline(good_pixels[0], good_pixels[1],
                                   stamp[good_pixels[0], good_pixels[1], 0])
splG = inter.SmoothBivariateSpline(good_pixels[0], good_pixels[1],
                                   stamp[good_pixels[0], good_pixels[1], 1])
splB = inter.SmoothBivariateSpline(good_pixels[0], good_pixels[1],
                                   stamp[good_pixels[0], good_pixels[1], 2])

new_img = sp.dstack([
    splR(stamp_xy[0], stamp_xy[1]),
    splG(stamp_xy[0], stamp_xy[1]),
    splB(stamp_xy[0], stamp_xy[1])
])

plt.imshow(new_img)
'''
The image is repaired by replacing the dead pixels with the newly approximated pixels.
'''
fxdR = img[:, :, 0]
fxdG = img[:, :, 1]
fxdB = img[:, :, 2]

fxdR[damaged_pixels[0],
     damaged_pixels[1]] = new_img[damaged_pixels_stamp[0],
                                  damaged_pixels_stamp[1], 0]
fxdG[damaged_pixels[0],