Example No. 1
def load_directory(path,
                   pattern='.txt',
                   sep=None,
                   comment_char='#',
                   dt=None,
                   t_unit='',
                   coord_unit='',
                   **attrs):
    """
	load_directory(path,pattern = '.txt',sep = None,comment_char = '#',dt=None,t_unit='',**attrs)
	loads all the trajectories listed in 'path', which have the same 'pattern'.
	columns are separated by 'sep' (default is None: a indefinite number of 
	white spaces). Comments in the trajectory start with 'comment_char'.
	**attrs is used to assign columns to the trajectory attributes and to 
	add annotations. 
	If the time interval is added (and 't' is not called in the **attrs) 
	then the time column 't' is added, and the 't_unit' can be set.
	If 'coord' is called then the unit must be added.
	"""

    if 'coord' in attrs.keys() and len(coord_unit) == 0:
        raise AttributeError(
            'Please, specify the coordinate unit \'coord_unit\'')
    if 't' in attrs.keys() and len(t_unit) == 0:
        raise AttributeError('Please, specify the time unit \'t_unit\'')
    if dt is not None and len(t_unit) == 0:
        raise AttributeError('Please, specify the time unit \'t_unit\'')
    if dt is not None and 't' in attrs.keys():
        raise AttributeError(
            'Time is already loaded by the trajectories, you cannot also compute it from frames. Please, either remove the dt option or do not load the \'t\' column from the trajectories'
        )
    trajectories = []  #the list of trajectories
    if pattern.endswith('$'):
        #list all the files in path whose names end with the pattern
        files = [f for f in os.listdir(path) if f.endswith(pattern[:-1])]
    else:
        #list all the files in path whose names contain the pattern
        files = [f for f in os.listdir(path) if pattern in f]

    for file in files:
        trajectory = Traj(experiment=path,
                          path=os.getcwd() + '/' + path,
                          file=file)
        trajectory.load(path + '/' + file,
                        sep=sep,
                        comment_char=comment_char,
                        **attrs)
        if dt is not None:
            trajectory.time(dt, t_unit)
        if 'coord' in attrs.keys():
            trajectory.annotations('coord_unit', coord_unit)
        trajectory.fill()
        trajectory.norm_f()
        trajectories.append(trajectory)
    return trajectories
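A minimal usage sketch for load_directory; the directory name, column indices and units below are hypothetical and only illustrate how **attrs maps file columns to trajectory attributes:

trajectories = load_directory(
    'raw_trajectories',   # hypothetical directory with the .txt trajectory files
    pattern='.txt',
    dt=0.2675,            # hypothetical frame interval, used to compute the 't' column
    t_unit='s',
    coord_unit='um',      # required because 'coord' is assigned below
    frames=0,             # column 0 of each file contains the frame number
    coord=(1, 2),         # columns 1 and 2 contain the x and y coordinates
    f=3)                  # column 3 contains the fluorescence intensity
print(len(trajectories), 'trajectories loaded')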
Example No. 2
        def interpolation(to_interpolate, delta_t, k=3):

            interpolated_traj = Traj(interpolated='True')
            interpolated_traj.annotations(to_interpolate.annotations())
            interpolated_traj.annotations()['delta_t'] = delta_t

            l = len(to_interpolate)

            if not l > k:

                #UnivariateSpline requires that m > k, where m is the number of interpolated points
                #and k is the degree of the smoothing spline. The default, both in UnivariateSpline and here, is k = 3.
                k = l - 1

            #the new, uniform time points for the interpolated trajectory
            t = [to_interpolate.start()]
            while t[-1] <= to_interpolate.end():
                t.append(t[-1] + delta_t)

            interpolated_traj.input_values(
                name='t', x=t, unit=to_interpolate.annotations()['t_unit'])

            for attribute in to_interpolate.attributes():

                if attribute in ['f', 'mol']:

                    s = UnivariateSpline(to_interpolate.t(),
                                         getattr(to_interpolate,
                                                 '_' + attribute),
                                         k=k)
                    interpolated_traj.input_values(name=attribute,
                                                   x=s(interpolated_traj.t()))

                if attribute == 'coord':

                    s_x = UnivariateSpline(to_interpolate.t(),
                                           to_interpolate.coord()[0],
                                           k=k)
                    s_y = UnivariateSpline(to_interpolate.t(),
                                           to_interpolate.coord()[1],
                                           k=k)
                    interpolated_traj.input_values(
                        name='coord',
                        x=[
                            s_x(interpolated_traj.t()),
                            s_y(interpolated_traj.t())
                        ],
                    )

            return (interpolated_traj)
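As listed, interpolation is a helper extracted from inside align (see Example No. 3) and relies on names imported there. A minimal sketch of how it could be used on its own, assuming the imports below and a hypothetical trajectory with a fluorescence attribute:

from trajalign.traj import Traj
from scipy.interpolate import UnivariateSpline

# hypothetical trajectory sampled every 0.5 s
tr = Traj()
tr.input_values('t', [0.0, 0.5, 1.0, 1.5, 2.0], unit='s')
tr.input_values('f', [1.0, 2.0, 4.0, 2.5, 1.5])

# resample the fluorescence intensity on a finer, uniform 0.1 s grid
fine = interpolation(tr, 0.1)
print(fine.t())
print(fine.f())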
Example No. 3
def align(path_target,
          path_reference,
          ch1,
          ch2,
          fimax1=False,
          fimax2=False,
          fimax_filter=[-3 / 35, 12 / 35, 17 / 35, 12 / 35, -3 / 35]):
    """
	align( path_target , path_reference , ch1 , ch2 , ):
	aligns in space and in time the trajectories identified by path_target to path_reference,
	which is the reference trajectory. As a convention within the align function trajectories 
	labeled with 1 are the target trajectories that need to be ligned to the reference 
	trajectories, which are labelled with 2.The alignment uses the trajectories in ch1 
	and ch2, which have been acquired simultaneously and whose alignment has been 
	corrected for chormatic aberrations and imaging misalignments. 'ch1' refers to 
	the trajectories that need to be aligned to the average trajectory in 'path_target'. 
	'ch2' refers to 'path_reference'. Both the target and the reference trajectories can be
	aligned using only the trajectory information up to the peak of fluorescence intensity by 
	setting fimax1 and fimax2 to True, respectively. If fimax1 and/or fimax2 are true, then fimax_filer
	is used to compute where the peak of fluorescence intensity is. If no filter is desired, set 
	fimax_filer = [ 1 ].
	"""
    def spline(t1, t2):
        """
		interpolate t1 or t2 with a spline.
		"""

        #the interpolation function
        def interpolation(to_interpolate, delta_t, k=3):

            interpolated_traj = Traj(interpolated='True')
            interpolated_traj.annotations(to_interpolate.annotations())
            interpolated_traj.annotations()['delta_t'] = delta_t

            l = len(to_interpolate)

            if not l > k:

                #UnivariateSpline requires that m > k, where m is the number of interpolated points
                #and k is the degree of the smoothing spline. The default, both in UnivariateSpline and here, is k = 3.
                k = l - 1

            #the new, uniform time points for the interpolated trajectory
            t = [to_interpolate.start()]
            while t[-1] <= to_interpolate.end():
                t.append(t[-1] + delta_t)

            interpolated_traj.input_values(
                name='t', x=t, unit=to_interpolate.annotations()['t_unit'])

            for attribute in to_interpolate.attributes():

                if attribute in ['f', 'mol']:

                    s = UnivariateSpline(to_interpolate.t(),
                                         getattr(to_interpolate,
                                                 '_' + attribute),
                                         k=k)
                    interpolated_traj.input_values(name=attribute,
                                                   x=s(interpolated_traj.t()))

                if attribute == 'coord':

                    s_x = UnivariateSpline(to_interpolate.t(),
                                           to_interpolate.coord()[0],
                                           k=k)
                    s_y = UnivariateSpline(to_interpolate.t(),
                                           to_interpolate.coord()[1],
                                           k=k)
                    interpolated_traj.input_values(
                        name='coord',
                        x=[
                            s_x(interpolated_traj.t()),
                            s_y(interpolated_traj.t())
                        ],
                    )

            return (interpolated_traj)

        #both trajectories are interpolated with the smaller of the two
        #delta_t, i.e. on the finer time grid.

        if t1.annotations()['delta_t'] >= t2.annotations()['delta_t']:

            delta_t = float(t2.annotations()['delta_t'])

        else:

            delta_t = float(t1.annotations()['delta_t'])

        #f == f is False for NaN values: keep only the points with a defined fluorescence intensity
        not_nan = [i for i in range(len(t1)) if t1.f(i) == t1.f(i)]
        t1_to_interpolate = t1.extract(not_nan)

        not_nan = [i for i in range(len(t2)) if t2.f(i) == t2.f(i)]
        t2_to_interpolate = t2.extract(not_nan)

        return (interpolation(t1_to_interpolate, delta_t),
                interpolation(t2_to_interpolate, delta_t))

    def cc(input_t1, input_t2):
        """
		cc( input_t1 , input_t2 ) returns the time lag between the trajectory input_t1 and the trajectory input_t2,
		computed from the cross correlation of the fluorescence intensities of the two trajectories. 
		The trajectory input_t2 will be aligned in time to input_t1 by adding the output of cc to input_t2.t()
		"""

        t1 = cp.deepcopy(input_t1)
        t2 = cp.deepcopy(input_t2)

        if t1.annotations()['delta_t'] != t2.annotations()['delta_t']:
            raise AttributeError(
                'The two trajectories have different \'delta_t\' ')
        else:
            delta_t = t1.annotations()['delta_t']

        #extend t1 to be as long as to include the equivalent
        #of t2 lifetime as NA before and after it:

        t1.start(t1.start() - t2.lifetime())
        t1.end(t1.end() + t2.lifetime())

        #align the two trajectories to the same start point
        lag0 = t1.start() - t2.start()
        t2.input_values('t', t2.t() + lag0)

        output = []
        while t2.end() <= t1.end():

            #because of floating point rounding errors we cannot select the points with
            #f1 = [ t1.f( i ) for i in range( len( t1 ) ) if ( t1.t( i ) >= t2.start() ) & ( t1.t( i ) <= t2.end() ) ]
            #instead, the comparisons below allow a tolerance of delta_t / 2
            f1 = [
                t1.f(i) for i in range(len(t1))
                if (t1.t(i) - t2.start() > -delta_t / 2)
                & (t1.t(i) - t2.end() < delta_t / 2)
            ]

            if len(f1) != len(t2):
                raise IndexError(
                    "There is a problem with the selection of t1 fluorescence intensities and t2 length in the cross-correlation function cc. The lengths do not match."
                )

            output.append(
                sum([
                    f1[i] * t2.f(i) for i in range(len(t2))
                    if (f1[i] == f1[i]) & (t2.f(i) == t2.f(i))
                ]))

            t2.lag(1)

        return (lag0 + output.index(max(output)) * t1.annotations()['delta_t'])

    def unify_start_and_end(t1, t2):
        """
		Uniform the start and the end of two trajectories that overlap
		in time, so that the overlapping time points can be used to compute the
		rotation and translation that aligns the two trajectories together.
		"""

        if t1.annotations()['delta_t'] != t2.annotations()['delta_t']:
            raise AttributeError(
                'The trajectories input in unify_start_and_end \
					have different delta_t')
        if t1.start() >= t2.end():
            raise AttributeError(
                'The trajectory t1 input in unify_start_and_end \
					starts after the trajectory t2 ends. The two trajectories must significantly overlap'
            )
        if t2.start() >= t1.end():
            raise AttributeError(
                'The trajectory t2 input in unify_start_and_end \
					starts after the trajectory t1 ends. The two trajectories must significantly overlap'
            )

        if t1.start() < t2.start():
            t1.start(t2.start())
        else:
            t2.start(t1.start())
        if t1.end() < t2.end():
            t2.end(t1.end())
        else:
            t1.end(t2.end())

        return ()

    def R(angle):

        return (np.matrix([[np.cos(angle), -np.sin(angle)],
                           [np.sin(angle), np.cos(angle)]],
                          dtype='float64'))

    #-------------------------END-OF-DEFINITIONS--------------------------------

    header()

    target_trajectory = Traj()
    target_trajectory.load(path_target)

    reference_trajectory = Traj()
    reference_trajectory.load(path_reference)

    #################################################################################################################
    #average trajectories are centered on their center of mass and must have been previously lain down
    #(lie_down function in trajalign/average.py) so that they are oriented in the same way. The
    #average transformation that aligns the left trajectory to the right trajectory (we use the notation in
    #Horn 1987 and Picco 2015) is only true for small rotations, and it is important to minimise inaccuracies
    #that can derive from the approximation of the rotation and translation. For more details see
    #Picco et al. 2015, Material and Methods, Two colour alignment procedure, Estimate of the average transformations).
    #################################################################################################################

    #	t1_center_of_mass = t1.center_mass()
    #	t1.translate( - t1_center_of_mass )
    #	t2.translate( - t2.center_mass() )

    if (fimax1):

        print(
            'fimax1 = True ; the software uses only the information of the target trajectory up to its peak of fluorescence intensity.'
        )

        t1 = target_trajectory.fimax(fimax_filter)

    else:

        t1 = target_trajectory

    t1_center_mass = t1.center_mass()
    t1.translate(-t1_center_mass)

    if (fimax2):

        print(
            'fimax2 = True ; the software uses only the information of the reference trajectory up to its peak of fluorescence intensity.'
        )

        t2 = reference_trajectory.fimax(fimax_filter)

    else:

        t2 = reference_trajectory

    t2_center_mass = t2.center_mass()
    t2.translate(-t2_center_mass)

    l = len(ch1)

    #check that the dataset of loaded trajectories is complete
    if l != len(ch2):
        raise IndexError(
            'The number of trajectories for ch1 and for ch2 differ.')

    #define the dictionary where the transformations will be stored
    T = {'angle': [], 'translation': [], 'lag': []}

    #compute the transformations that align t1 and t2 together.
    for i in range(l):

        print("Align " + path_target + " to " + ch1[i].annotations()['file'] +
              " and " + path_reference + " to " + ch2[i].annotations()['file'])

        #spline the trajectories, to reduce the noise
        if (fimax1):
            spline_t1, spline_ch1 = spline(t1, ch1[i].fimax(fimax_filter))
        else:
            spline_t1, spline_ch1 = spline(t1, ch1[i])

        if (fimax2):
            spline_t2, spline_ch2 = spline(t2, ch2[i].fimax(fimax_filter))
        else:
            spline_t2, spline_ch2 = spline(t2, ch2[i])

        #lag t1
        ch1_lag = cc(spline_t1, spline_ch1)
        spline_ch1.input_values('t', spline_ch1.t() + ch1_lag)

        #lag t2
        ch2_lag = cc(spline_t2, spline_ch2)
        spline_ch2.input_values('t', spline_ch2.t() + ch2_lag)

        #unify the start and the end of the trajectory splines that are paired to compute the rotation and translation.
        unify_start_and_end(spline_t1, spline_ch1)
        unify_start_and_end(spline_t2, spline_ch2)

        #NOTE: the weight used in Picco et al., 2015 is slightly different. To use the same weight one should replace spline_t1.f() with spline_t1.f() / ( spline_t1.coord_err()[ 0 ] * spline_t1.coord_err()[ 1 ] )
        align_ch1_to_t1 = MSD(spline_t1, spline_ch1)
        align_ch2_to_t2 = MSD(spline_t2, spline_ch2)

        #The transformation that aligns t1 to t2 will be the transformation that aligns ch2 to t2 combined with the
        #inverse of the transformation that aligns ch1 to t1.
        #
        # R_2 @ R_1^{-1} @ ( t1 - t1.center_mass() ) + R_2 @ ( ch1.center_mass() - ch2.center_mass() ) + t2.center_mass()
        #
        #As the mean in MSD is weighted (see MSD in trajalign/average.py) the equation becomes
        #
        # R_2 @ R_1^{-1} @ ( t1 - align_ch1_to_t1[ 'rc' ] ) + R_2 @ ( align_ch1_to_t1[ 'lc' ] - align_ch2_to_t2[ 'lc' ] ) + align_ch2_to_t2[ 'rc' ]
        #
        #where align_ch1_to_t1[ 'rc' ], align_ch1_to_t1[ 'lc' ], align_ch2_to_t2[ 'rc' ] and align_ch2_to_t2[ 'lc' ] are
        #the estimates of the centers of mass with the weighted mean convention used in MSD.
        #Finally, the target and reference trajectory were initially shifted by
        #
        # - t2_center_mass
        #
        #and
        #
        # - t1_center_mass
        #
        #Therefore, the final transformation that aligns the target trajectory to the reference trajectory must be corrected for these initial shifts
        #
        # R_2 @ R_1^{-1} @ ( t1 - align_ch1_to_t1[ 'rc' ] ) + R_2 @ ( align_ch1_to_t1[ 'lc' ] - align_ch2_to_t2[ 'lc' ] ) + align_ch2_to_t2[ 'rc' ] +
        # + t2_center_mass + t1_center_mass
        #
        #NOTE: in eLife we used the geometrical center of mass, t1.center_mass(), and not the
        #approximation of the center of mass that best aligns t1 and ch1 under the weighted mean convention in MSD, which is align_ch1_to_t1[ 'rc' ].
        #Therefore, in Picco et al., 2015
        #
        #	- R( T[ 'angle' ][ -1 ] ) @ align_ch1_to_t1[ 'rc' ]
        #
        #would become
        #
        #	- R( T[ 'angle' ][ -1 ] ) @ t1.center_mass()
        #

        #Compute the angle as the atan2 of the sin( align_ch2_to_t2[ 'angle' ] - align_ch1_to_t1[ 'angle' ] )
        #and cos( align_ch2_to_t2[ 'angle' ] - align_ch1_to_t1[ 'angle' ] )
        a = np.sin(align_ch2_to_t2['angle']) * np.cos(
            align_ch1_to_t1['angle']) - np.cos(
                align_ch2_to_t2['angle']) * np.sin(align_ch1_to_t1['angle'])
        b = np.cos(align_ch2_to_t2['angle']) * np.cos(
            align_ch1_to_t1['angle']) + np.sin(
                align_ch2_to_t2['angle']) * np.sin(align_ch1_to_t1['angle'])
        T['angle'].append(np.arctan2(a, b))
        T[ 'translation' ].append( np.array(
          - R( T[ 'angle' ][ -1 ] ) @ align_ch1_to_t1[ 'rc' ]\
            + R( align_ch2_to_t2[ 'angle' ] ) @ ( align_ch1_to_t1[ 'lc' ] - align_ch2_to_t2[ 'lc' ] )\
            + align_ch2_to_t2[ 'rc' ] + t2_center_mass
          )[ 0 ] ) #the [ 0 ] is because otherwise it would be [[ x , y ]] instead of [ x , y ]
        T['lag'].append(ch2_lag - ch1_lag)

    #compute the median and the standard error (SE) of the transformations.
    #NOTE that if fimax2 is used, the center of mass of the reference trajectory does not
    #correspond to the center of mass of the trajectory to which the target trajectory
    #is aligned: the target trajectory is aligned to the center of mass of the reference
    #trajectory truncated at its peak of fluorescence intensity.
    T_median = {
        'angle':
        np.median(T['angle']),
        'angle_SE':
        nanMAD(T['angle']) / np.sqrt(l),
        'translation': [
            np.median([T['translation'][i][0] for i in range(l)]),
            np.median([T['translation'][i][1] for i in range(l)])
        ],
        'translation_SE': [
            nanMAD([T['translation'][i][0] for i in range(l)]) / np.sqrt(l),
            nanMAD([T['translation'][i][1] for i in range(l)]) / np.sqrt(l)
        ],
        'lag':
        np.median(T['lag']),
        'lag_SE':
        nanMAD(T['lag']) / np.sqrt(l),
        'n':
        l
    }

    target_trajectory.rotate(T_median['angle'], angle_err=T_median['angle_SE'])
    target_trajectory.translate(T_median['translation'],
                                v_err=(T_median['translation_SE'][0],
                                       T_median['translation_SE'][1]))
    target_trajectory.input_values('t',
                                   target_trajectory.t() + T_median['lag'])

    dot_positions = [
        i for i in range(len(path_target)) if path_target[i] == '.'
    ]
    file_ending = dot_positions[
        len(dot_positions) -
        1]  #there could be more than one dot in the file name. Pick the last.
    file_name = path_target[0:file_ending] + '_aligned' + path_target[
        file_ending:len(path_target)]

    # annotations
    target_trajectory.annotations('aligned_to', str(path_reference))
    target_trajectory.annotations('original_file', str(path_target))
    target_trajectory.annotations('alignment_angle',
                                  str(T_median['angle']) + ' rad')
    target_trajectory.annotations('alignment_angle_SE',
                                  str(T_median['angle_SE']) + ' rad')
    target_trajectory.annotations(
        'alignment_translation',
        str(T_median['translation']) + ' ' +
        target_trajectory.annotations()['coord_unit'])
    target_trajectory.annotations(
        'alignment_translation_SE',
        str(T_median['translation_SE']) + ' ' +
        target_trajectory.annotations()['coord_unit'])
    target_trajectory.annotations(
        'alignment_lag',
        str(T_median['lag']) + ' ' + target_trajectory.annotations()['t_unit'])
    target_trajectory.annotations(
        'alignment_lag_SE',
        str(T_median['lag_SE']) + ' ' +
        target_trajectory.annotations()['t_unit'])

    target_trajectory.save(file_name)

    print('The trajectory aligned to ' + path_reference +
          ' has been saved as ' + file_name)
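A hedged usage sketch of align; the file names and the way the paired two-colour trajectories are loaded are hypothetical, and ch1 and ch2 must be equally long lists of Traj objects (e.g. loaded with load_directory, see Example No. 1):

# hypothetical paired two-colour trajectories, acquired simultaneously
ch1 = load_directory('two_colour/target_channel', pattern='.txt',
                     t=0, coord=(1, 2), f=3, t_unit='s', coord_unit='um')
ch2 = load_directory('two_colour/reference_channel', pattern='.txt',
                     t=0, coord=(1, 2), f=3, t_unit='s', coord_unit='um')

# align the average target trajectory to the average reference trajectory
align('average_target.txt', 'average_reference.txt', ch1, ch2)
# the aligned trajectory is saved next to the original, here as 'average_target_aligned.txt'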
Example No. 4
def load_directory(path , pattern = '.txt' , sep = None , comment_char = '#' , dt = None , t_unit = '' , coord_unit = '' , intensity_normalisation = 'None' , **attrs ):

    """
    load_directory(path , pattern = '.txt' , sep = None , comment_char = '#' , dt = None , t_unit = '' , coord_unit = '' , intensity_normalisation = 'None' , **attrs ):
    loads all the trajectories listed in 'path', which have the same 'pattern'.
    columns are separated by 'sep' (default is None: a indefinite number of 
    white spaces). Comments in the trajectory start with 'comment_char'.
    
    intensity_normalisation can be: 'None' (no normalisation, default), 'Integral' (normalise over the integral of the fluorescence intensity), 
    or 'Absolute' (normalise the fluorescence intensity values between 0 and 1)"

    **attrs is used to assign columns to the trajectory attributes and to 
    add annotations. 
    If the time interval is added (and 't' is not called in the **attrs) 
    then the time column 't' is added, and the 't_unit' can be set.
    If 'coord' is called then the unit must be added.
    """

    if ('coord' in attrs.keys()) & (len(coord_unit) == 0): 
        raise AttributeError('Please, specify the coordinate unit \'coord_unit\'')
    if ('t' in attrs.keys()) & (len(t_unit) == 0): 
        raise AttributeError('Please, specify the time unit \'t_unit\'')
    if (dt is not None) & (len(t_unit) == 0): 
        raise AttributeError('Please, specify the time unit \'t_unit\'')
    if (dt is not None) & ('t' in attrs.keys()):
        raise AttributeError('Time is already loaded by the trajectories, you cannot also compute it from frames. Please, either remove the dt option or do not load the \'t\' column from the trajectories')

    trajectories = [] #the list of trajectories
    if ( pattern[ len( pattern ) - 1 ] == '$' ) : 
        files = [ f for f in sorted( os.listdir(path) ) if f.endswith( pattern[ : - 1 ] ) ] #list all the files in path that have pattern
    else : 
        files = [ f for f in sorted( os.listdir(path) ) if pattern in f] #list all the files in path that have pattern

    for file in files:
        trajectory = Traj(experiment = path, path = os.getcwd()+'/'+path, file = file)
        trajectory.load(path+'/'+file,sep = sep, comment_char = comment_char, **attrs)
        if dt is not None:
            trajectory.time(dt,t_unit)
        if ('coord' in attrs.keys()):

            trajectory.annotations('coord_unit',coord_unit)

        if intensity_normalisation == 'Integral' :
            
            trajectory.scale_f()

        elif intensity_normalisation == 'Absolute' :
        
            trajectory.norm_f()

        elif intensity_normalisation != 'None' :

            raise AttributeError( "load_directory: Please, choose a value for the variable intensity_normalisation between 'None' (no normalisation, default), 'Integral' (normalise over the integral of the fluorescence intensity), or 'Absolute' (normalise the fluorescence intensity values between 0 and 1)" )

        trajectory.annotations( 'intensity_normalisation' , intensity_normalisation )
        trajectory.fill()
        trajectories.append(trajectory)
    
    print( "\n >> load_directory: The 'intensity_normalisation' applied to the trajectories is '" + intensity_normalisation + "' <<\n" )

    return trajectories 
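This later version of load_directory adds the intensity_normalisation option; a hypothetical call normalising each trajectory over the integral of its fluorescence intensity (directory name and column assignments are assumptions):

trajectories = load_directory(
    'raw_trajectories',                  # hypothetical directory
    pattern='.data$',                    # a trailing '$' matches the end of the file names
    frames=0, coord=(1, 2), f=3,         # hypothetical column assignments
    dt=0.2675, t_unit='s', coord_unit='um',
    intensity_normalisation='Integral')  # or 'Absolute', or 'None' (the default)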
Example No. 5
def trajectory_average( aligned_trajectories_to_average , r , median , fimax ) :    

    #define the trajectory where the average will be stored
    t = Traj()

    #inherit the annotations from the reference trajectory
    for a in aligned_trajectories_to_average[ r ].annotations().keys():

        if a == 'file':
            t.annotations( 'reference_file' , aligned_trajectories_to_average[ r ].annotations()[ a ])
        else :
            t.annotations( a , aligned_trajectories_to_average[ r ].annotations()[ a ]) 

    if fimax :
        t.annotations( 'fimax' , 'TRUE' )

    #group all the attributes of the aligned trajectories...
    attributes = [ a for a in aligned_trajectories_to_average[ r ].attributes() if a not in ('t','frames')] 
    #create an empty dictionary where all the attributes that will then be averaged are stored
    attributes_to_be_averaged = {}
    for a in attributes:
        attributes_to_be_averaged[a] = []

    #merge all the trajectory attributes into the dictionary attributes_to_be_averaged.
    for j in range( len( aligned_trajectories_to_average ) ):
        
        for a in attributes:

            attributes_to_be_averaged[a].append(getattr(aligned_trajectories_to_average[ j ],'_'+a))

    #all the aligned trajectories are set to start at the same mean_start and finish at the same mean_end, computed from
    #trajectories_time_span in compute_average(). Hence, the time interval is the same
    t.input_values( 't' , aligned_trajectories_to_average[ r ].t()) 
        
    #average the attributes of the trajectories and assign 
    #them to the average trajectory [ r ]
    with wr.catch_warnings():
        
        # if a line contains only nan values to be averaged, a warning is output. Here we suppress such warnings.
        wr.simplefilter("ignore", category=RuntimeWarning)
    
        for a in attributes: 

            if a.endswith( '_err' ) :
                
                raise AttributeError('The trajectories to be averaged already have a non-empty error element, suggesting that they are already the result of an average. These errors are currently not propagated. Check that your trajectories are correct')
            
            #if _a_err is in the trajectory slots, then the attribute a is not itself
            #an error attribute (i.e. an attribute ending in _err): if a ended in '_err',
            #then _a_err would carry the suffix _err twice (i.e. _err_err) and would have
            #no equivalent in the trajectory __slots__. If _a_err is in the trajectory
            #slots, then both the mean and the sem can be computed. There is no sem without mean.

            if '_' + a + '_err' in t.__slots__:
                
                if median :

                    t.input_values( a ,
                            np.nanmedian( attributes_to_be_averaged[ a ], axis = 0 )
                        )

                else :

                    t.input_values( a ,
                            np.nanmean( attributes_to_be_averaged[ a ], axis = 0 )
                        )

                #if there is no n defined yet, it computes it
                if not t.n().any() : 
                    #compute the number of not-nan data points by dividing
                    #the nansum by the nanmean. The operation is performed
                    #on the last attribute in the loop that can either have 
                    #two dimensions (as 'coord') or one. In case of two dims
                    #only one is used to compute '_n'.
                    
                    x = np.nanmean( attributes_to_be_averaged[ a ] , axis = 0 )
                    y = np.nansum( attributes_to_be_averaged[a] , axis = 0 )
            
                    if len(x) == 2:
                        t.input_values( 'n' , y[0]/x[0] )
                    else:
                        with wr.catch_warnings():
                            # if both y[i] and x[i] are 0, a warning is output. Here we suppress such warnings.
                            wr.simplefilter("ignore", category=RuntimeWarning)
                            t.input_values( 'n' , y/x )

                #compute the errors as standard errors of the mean/median
                try :
                    if median :

                        t.input_values( a + '_err' ,
                            nanMAD( attributes_to_be_averaged[ a ], axis = 0 ) / np.sqrt( t.n() )
                            )

                    else :

                        t.input_values( a + '_err' ,
                            np.nanstd( attributes_to_be_averaged[ a ], axis = 0 ) / np.sqrt( t.n() )
                            )

                except Exception :

                    raise AttributeError( 'The attribute ' + a + ' cannot have its error assigned' )

            else :

                raise AttributeError( 'The attribute ' + a + ' is not recognised as an attribute' )

    return( t )
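The averaging step at the core of this function can be illustrated with plain NumPy; a minimal sketch, assuming two toy fluorescence profiles already aligned on the same time grid:

import numpy as np

# two hypothetical fluorescence profiles stacked along axis 0
stacked = np.array([[1.0, 2.0, np.nan, 4.0],
                    [1.2, 1.8, 3.1, np.nan]])

mean = np.nanmean(stacked, axis=0)             # mean ignoring NaNs, as in the non-median branch
n = np.nansum(stacked, axis=0) / mean          # number of non-NaN points per time point
sem = np.nanstd(stacked, axis=0) / np.sqrt(n)  # standard error of the mean
print(mean, n, sem)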
Example No. 6
from trajalign.traj import Traj
from matplotlib import pyplot as plt

# when a trajectory is defined, we can enter annotations
t = Traj(what="my first trajectory", mood="today is a beautiful day")
print(t)
t.annotations('mood', 'all this seems pretty complicated, depressing!')

print(t.annotations()['what'])  # read the annotation 'what'

# set the time attribute of the trajectory
t.input_values('t', [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1])
print(t)  #note that the frames are also generated automatically

t.annotations('t_unit', 's')
print(t)

t.input_values('coord', [[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1],
                         [1, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9]],
               unit='apples')
print(t)

# load a real trajectory. We define the new trajectory first
d = Traj(experiment='Sla1-GFP',
         date='01/01/2000',
         temperature='cold!',
         note='Happy new millennium')

d.load(file_name='trajectory_average_example/raw_trajectories/02.data',
       frames=0,
       coord=(1, 2))  # further columns (e.g. the fluorescence intensity, f) can be mapped in the same way
Example No. 7
import os
from trajalign.traj import Traj
from matplotlib import pyplot as plt

# 'trj' is assumed to be the average trajectory, loaded or computed beforehand
files = os.listdir('median')

plt.figure(1, figsize=(10, 8))

for f in files:
    t = Traj()
    t.load('median/' + f)

    plt.plot(t.t() - trj.start(), t.coord()[0], '-')

plt.plot(trj.t() - trj.start(), trj.coord()[0], 'w-', linewidth=5.5)
plt.plot(trj.t() - trj.start(),
         trj.coord()[0],
         'k-',
         linewidth=3,
         label='Average trajectory')

plt.xlim([-0.5, trj.end() - trj.start() + 0.5])
plt.ylim([-1, 3.2])

plt.ylabel('Inward movement (' + trj.annotations('coord_unit') + ')',
           fontsize=24)
plt.xlabel('Time (' + trj.annotations('t_unit') + ')', fontsize=24)
plt.title(str(len(files)) +
          ' trajectories\naligned in space and time and averaged',
          fontsize=24,
          verticalalignment='bottom')

plt.legend(loc='best')
plt.savefig('plot.png')
Example No. 8
from matplotlib import pyplot as plt

# 'abp1', 'sla1' and 'rvs167' are assumed to be trajectories loaded beforehand,
# 't_0' a reference time point and 'myplot' a user-defined plotting helper
sla1.input_values('t', sla1.t() - t_0)
rvs167.input_values('t', rvs167.t() - t_0)

#plot
f, (trj, fi) = plt.subplots(2,
                            1,
                            gridspec_kw={'height_ratios': [2, 1]},
                            figsize=(8, 11),
                            sharex=True)

myplot(trj, abp1, what='coord', col='#D7110E', label='Abp1')
myplot(trj, sla1, what='coord', col='#336CFF', label='Sla1')
myplot(trj, rvs167, what='coord', col='#006400', label='Rvs167')

myplot(fi, abp1, what='mol', col='#D7110E', label='Abp1')
myplot(fi, sla1, what='mol', col='#336CFF', label='Sla1')
myplot(fi, rvs167, what='mol', col='#006400', label='Rvs167')

plt.subplot(trj)
plt.ylabel('Inward movement (' + abp1.annotations()['coord_unit'] + ')',
           fontsize=24)
plt.legend(loc='best')

plt.subplot(fi)
plt.ylabel('Number of molecules', fontsize=24)
plt.xlabel('Time (' + abp1.annotations()['t_unit'] + ')', fontsize=24)
plt.legend(loc='best')

f.tight_layout()
f.savefig('plot_aligned_trajectories.png')
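The helper myplot is not part of the excerpt. A hypothetical sketch of such a helper, consistent with the calls above (an Axes, a trajectory, the attribute to plot, a colour and a label); names and behaviour are assumptions:

def myplot(ax, t, what='coord', col='k', label=''):
    # hypothetical helper: plot one attribute of a trajalign trajectory on the axes 'ax'
    if what == 'coord':
        y = t.coord()[0]          # first spatial coordinate (e.g. the inward movement)
    else:
        y = getattr(t, what)()    # e.g. 'mol' or 'f'
    ax.plot(t.t(), y, '-', color=col, label=label)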