Example #1
def set_ParDb():
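    """Assemble the parameter database: for each shortcut store the parameter
    name, plot symbol, expected-value symbol, unit, plotting limits and
    collection key, then write the table to ParDb_path as CSV."""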
    b, fo, ro = 'bend', 'front_orientation', 'rear_orientation'
    bv, fov, rov = nam.vel([b, fo, ro])
    ba, foa, roa = nam.acc([b, fo, ro])
    fou, rou = nam.unwrap([fo, ro])
    d, v, a = 'dst', 'vel', 'acc'
    sd, sv, sa = nam.scal([d, v, a])
    ld, lv, la = nam.lin([d, v, a])
    sld, slv, sla = nam.scal([ld, lv, la])
    std = nam.straight_dst(d)
    sstd = nam.scal(std)
    fv, fsv = nam.freq([v, sv])
    cum_d, cum_sd = nam.cum([d, sd])

    srd, pau, tur, fee = 'stride', 'pause', 'turn', 'feed'
    chunks = [srd, pau, tur, fee]
    srd_t, pau_t, tur_t, fee_t = nam.dur(chunks)
    srd_tr, pau_tr, tur_tr, fee_tr = nam.dur_ratio(chunks)
    srd_N, pau_N, tur_N, fee_N = nam.num(chunks)
    srd_d = nam.dst(srd)
    srd_sd = nam.scal(srd_d)
    dsp = 'dispersion'
    dsp40 = f'40sec_{dsp}'
    sdsp, sdsp40 = nam.scal([dsp, dsp40])
    f_dsp, f_dsp40, f_sdsp, f_sdsp40 = nam.final([dsp, dsp40, sdsp, sdsp40])
    mu_dsp, mu_dsp40, mu_sdsp, mu_sdsp40 = nam.mean([dsp, dsp40, sdsp, sdsp40])
    max_dsp, max_dsp40, max_sdsp, max_sdsp40 = nam.max(
        [dsp, dsp40, sdsp, sdsp40])
    srd_fo, srd_ro, srd_b = nam.chunk_track(srd, [fou, rou, b])

    l_angle = 'angle $(deg)$'
    l_angvel = 'angular velocity $(deg/sec)$'
    l_angacc = 'angular acceleration $(deg/sec^2)$'
    l_time = 'time $(sec)$'
    l_time_ratio = 'time ratio $(-)$'
    l_freq = 'frequency $(Hz)$'
    l_dst = 'distance $(mm)$'
    l_body_length = 'body length $(mm)$'
    l_vel = 'velocity $(mm/sec)$'
    l_acc = 'acceleration $(mm/sec^2)$'
    l_sc_dst = 'scaled distance $(-)$'
    l_sc_vel = 'scaled velocity $(sec^{-1})$'
    l_sc_acc = 'scaled acceleration $(sec^{-2})$'
    l_num = 'counts $(#)$'
    l_mass = 'mass $(mg)$'

    sc_unit_dict = {l_dst: l_sc_dst, l_vel: l_sc_vel, l_acc: l_sc_acc}

    def generate_entries(bases, types):
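        # Each derivation type maps to: [naming function, its kwargs,
        # shortcut affix, affix position ('pre'/'suf'), symbol formatter,
        # formatter kwargs].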
        entry_dict = {
            'stride': [
                nam.chunk_track, {
                    'chunk_name': 'stride'
                }, 'str_', 'pre', sub, {
                    'q': 'str'
                }
            ],
            'dur': [nam.dur, {}, '_t', 'suf', sub, {
                'p': 't'
            }],
            'dur_ratio': [nam.dur_ratio, {}, '_tr', 'suf', sub, {
                'p': 'r'
            }],
            'lin': [nam.lin, {}, 'l', 'pre', sub, {
                'q': 'l'
            }],
            'mean': [nam.mean, {}, '_mu', 'suf', bar, {}],
            'std': [nam.std, {}, '_std', 'suf', wave, {}],
            'max': [nam.max, {}, '_max', 'suf', sub, {
                'q': 'max'
            }],
            'fin': [nam.final, {}, '_fin', 'suf', sub, {
                'q': 'fin'
            }],
            'scal': [nam.scal, {}, 's', 'pre', ast, {}]
        }
        entries = []
        for base in bases:
            for typ in types:
                fn, sn, sym, esym, u = base
                func, kwargs, affix, pos, fmt, fmt_kwargs = entry_dict[typ]
                # Pick the unit label of the derived parameter.
                if typ == 'dur':
                    nu = l_time
                elif typ == 'dur_ratio':
                    nu = l_time_ratio
                elif typ == 'scal':
                    nu = sc_unit_dict[u]
                else:
                    nu = u
                # Attach the affix to the shortcut as a prefix or a suffix.
                if pos == 'suf':
                    nsn = f'{sn}{affix}'
                elif pos == 'pre':
                    nsn = f'{affix}{sn}'
                # Symbol formatters take their argument as either 'p' or 'q'.
                try:
                    nsym = fmt(p=sym, **fmt_kwargs)
                    nesym = fmt(p=esym, **fmt_kwargs)
                except TypeError:
                    nsym = fmt(q=sym, **fmt_kwargs)
                    nesym = fmt(q=esym, **fmt_kwargs)
                # Naming functions take it positionally or as 'params'.
                try:
                    nfn = func(fn, **kwargs)
                except TypeError:
                    nfn = func(params=fn, **kwargs)
                entries.append([nfn, nsn, nsym, nesym, nu])

        return np.array(entries)

    cols = ['par', 'shortcut', 'symbol', 'exp_symbol', 'unit']
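    # The 'shortcut' column serves as the index of the database below.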

    temp_ang = [
        [b, 'b', 'b'],
        [fo, 'fo', r'or_{f}'],
        [ro, 'ro', r'or_{r}'],
    ]
    ang_ar = []
    for (fn, sn, sym) in temp_ang:
        ang_ar.append([fn, sn, th(sym), hat_th(sym), l_angle])
        ang_ar.append(
            [nam.vel(fn), f'{sn}v',
             dot_th(sym),
             dot_hat_th(sym), l_angvel])
        ang_ar.append(
            [nam.acc(fn), f'{sn}a',
             ddot_th(sym),
             ddot_hat_th(sym), l_angacc])
    ang_ar = np.array(ang_ar)

    lin_ar = np.array([
        [d, 'd', 'd', hat('d'), l_dst],
        [ld, 'ld', sub('d', 'l'),
         sub(hat('d'), 'l'), l_dst],
        [v, 'v', 'v', hat('v'), l_vel],
        [a, 'a', dot('v'), dot(hat('v')), l_acc],
        [lv, 'lv', sub('v', 'l'),
         sub(hat('v'), 'l'), l_vel],
        [la, 'la',
         sub(dot('v'), 'l'),
         sub(dot(hat('v')), 'l'), l_acc],
        [cum_d, 'cum_d',
         sub('d', 'cum'),
         sub(hat('d'), 'cum'), l_dst],
        [fv, 'fv', sub('f', 'v'),
         sub(hat('f'), 'v'), l_freq],
    ])

    sc_lin_ar = np.array([
        [sd, 'sd', sup('d', '*'),
         sup(hat('d'), '*'), l_sc_dst],
        [
            sld, 'sld',
            subsup('d', 'l', '*'),
            subsup(hat('d'), 'l', '*'), l_sc_dst
        ],
        [sv, 'sv', sup('v', '*'),
         sup(hat('v'), '*'), l_sc_vel],
        [sa, 'sa',
         sup(dot('v'), '*'),
         sup(dot(hat('v')), '*'), l_sc_acc],
        [
            slv, 'slv',
            subsup('v', 'l', '*'),
            subsup(hat('v'), 'l', '*'), l_sc_vel
        ],
        [
            sla, 'sla',
            subsup(dot('v'), 'l', '*'),
            subsup(dot(hat('v')), 'l', '*'), l_sc_acc
        ],
        [
            cum_sd, 'cum_sd',
            subsup('d', 'cum', '*'),
            subsup(hat('d'), 'cum', '*'), l_sc_dst
        ],
        [
            fsv, 'fsv',
            subsup('f', 'v', '*'),
            subsup(hat('f'), 'v', '*'), l_freq
        ],
    ])

    temp_chunk = [
        ['str', 'stride'],
        ['non_str', 'non_stride'],
        ['pau', 'pause'],
        ['tur', 'turn'],
        ['Ltur', 'Lturn'],
        ['Rtur', 'Rturn'],
        ['fee', 'feed'],
        ['chn', 'stridechain'],
    ]
    chunk_ar = []
    for (suf, cn) in temp_chunk:
        chunk_ar.append([
            nam.dst(cn), f'{suf}_d',
            sub('d', suf),
            sub(hat('d'), suf), l_dst
        ])
        chunk_ar.append([
            nam.scal(nam.dst(cn)), f'{suf}_sd',
            subsup('d', suf, '*'),
            subsup(hat('d'), suf, '*'), l_sc_dst
        ])
        chunk_ar.append([
            nam.straight_dst(cn), f'{suf}_std',
            subsup('d', suf, 'st'),
            subsup(hat('d'), suf, 'st'), l_dst
        ])
        chunk_ar.append([
            nam.scal(nam.straight_dst(cn)), f'{suf}_sstd',
            subsup('d', suf, 'st*'),
            subsup(hat('d'), suf, 'st*'), l_sc_dst
        ])
        chunk_ar.append(
            [nam.dur(cn), f'{suf}_t',
             sub('t', cn),
             sub(hat('t'), cn), l_time])
        chunk_ar.append([
            nam.mean(nam.dur(cn)), f'{suf}_t_mu',
            sub(bar('t'), cn),
            sub(bar(hat('t')), cn), l_time
        ])
        chunk_ar.append([
            nam.std(nam.dur(cn)), f'{suf}_t_std',
            sub(wave('t'), cn),
            sub(wave(hat('t')), cn), l_time
        ])
        chunk_ar.append([
            nam.cum(nam.dur(cn)), f'cum_{suf}_t',
            subsup('t', cn, 'cum'),
            subsup(hat('t'), cn, 'cum'), l_time
        ])
        chunk_ar.append([
            nam.max(nam.dur(cn)), f'{suf}_t_max',
            subsup('t', cn, 'm'),
            subsup(hat('t'), cn, 'm'), l_time
        ])
        chunk_ar.append([
            nam.start(cn), f'{suf}0',
            subsup('t', cn, 0),
            subsup(hat('t'), cn, 0), l_time
        ])
        chunk_ar.append([
            nam.stop(cn), f'{suf}1',
            subsup('t', cn, 1),
            subsup(hat('t'), cn, 1), l_time
        ])
        chunk_ar.append([
            nam.length(cn), f'{suf}_l',
            sub(cn, 'l'),
            sub(hat(cn), 'l'), l_num
        ])
        chunk_ar.append([
            nam.id(cn), f'{suf}_id',
            sub(cn, 'id'),
            sub(hat(cn), 'id'), l_num
        ])
        chunk_ar.append([
            nam.dur_ratio(cn), f'{suf}_tr',
            sub('r', cn),
            sub(hat('r'), cn), l_time_ratio
        ])
        chunk_ar.append([
            nam.num(cn), f'{suf}_N',
            sub('N', f'{cn}s'),
            sub(hat('N'), f'{cn}s'), f'# {cn}s'
        ])
    chunk_ar = np.array(chunk_ar)

    temp_dsp = [[dsp, 'disp', 'disp', hat('disp')],
                [dsp40, 'disp40',
                 sup('disp', 40),
                 sup(hat('disp'), 40)]]

    dsp_ar = []
    for (fn, sn, sym, esym) in temp_dsp:
        dsp_ar.append([fn, sn, sym, esym, l_dst])
        dsp_ar.append(
            [nam.scal(fn), f's{sn}',
             sup(sym, '*'),
             sup(esym, '*'), l_sc_dst])
        dsp_ar.append([nam.mean(fn), f'{sn}_mu', bar(sym), bar(esym), l_dst])
        dsp_ar.append([
            nam.scal(nam.mean(fn)), f's{sn}_mu',
            sup(bar(sym), '*'),
            sup(bar(esym), '*'), l_sc_dst
        ])
        dsp_ar.append([
            nam.max(fn), f'{sn}_max',
            sub(sym, 'max'),
            sub(esym, 'max'), l_dst
        ])
        dsp_ar.append([
            nam.scal(nam.max(fn)), f's{sn}_max',
            subsup(sym, 'max', '*'),
            subsup(esym, 'max', '*'), l_sc_dst
        ])
        dsp_ar.append([
            nam.final(fn), f'{sn}_fin',
            sub(sym, 'fin'),
            sub(esym, 'fin'), l_dst
        ])
        dsp_ar.append([
            nam.scal(nam.final(fn)), f's{sn}_fin',
            subsup(sym, 'fin', '*'),
            subsup(esym, 'fin', '*'), l_sc_dst
        ])

    dsp_ar = np.array(dsp_ar)

    par_ar = np.array([
        ['cum_dur', 'cum_t',
         sub('t', 'cum'),
         sub(hat('t'), 'cum'), l_time],
        ['length', 'l_mu',
         bar('l'), bar(hat('l')), l_body_length],
        [
            'stride_reoccurence_rate', 'str_rr',
            sub('str', 'rr'),
            sub(hat('str'), 'rr'), '-'
        ],
        ['length', 'l', 'l', hat('l'), l_body_length],
        [
            'amount_eaten', 'f_am',
            sub('m', 'feed'),
            sub(hat('m'), 'feed'), l_mass
        ],
        [
            'max_feed_amount', 'f_am_max',
            subsup('m', 'feed', 'm'),
            subsup(hat('m'), 'feed', 'm'), l_mass
        ],
        ['mass', 'm', 'm', hat('m'), l_mass],
        ['hunger', 'hunger', 'hunger',
         hat('hunger'), 'hunger (-)'],
        [
            'reserve_density', 'reserve_density', 'reserve_density',
            hat('reserve_density'), 'reserve density (-)'
        ],
        [
            'puppation_buffer', 'puppation_buffer', 'puppation_buffer',
            hat('puppation_buffer'), 'puppation buffer (-)'
        ],
        [
            'deb_f', 'deb_f',
            sub('f', 'deb'),
            sub(hat('f'), 'deb'), 'functional response (-)'
        ],
        [
            'deb_f_mean', 'deb_f_mu',
            sub(bar('f'), 'deb'),
            sub(hat(bar('f')), 'deb'), 'functional response (-)'
        ],
        [
            'Nlarvae', 'lar_N',
            sub('N', 'larvae'),
            sub(hat('N'), 'larvae'), '# larvae'
        ],
    ])

    orient_ar = np.array([[
        f'turn_{fou}', 'tur_fo', r'$\theta_{turn}$', r'$\hat{\theta}_{turn}$',
        l_angle
    ],
                          [
                              f'Lturn_{fou}', 'Ltur_fo', r'$\theta_{Lturn}$',
                              r'$\hat{\theta}_{Lturn}$', l_angle
                          ],
                          [
                              f'Rturn_{fou}', 'Rtur_fo', r'$\theta_{Rturn}$',
                              r'$\hat{\theta}_{Rturn}$', l_angle
                          ],
                          [
                              srd_fo, 'str_fo', r'$\Delta{\theta}_{or_{f}}$',
                              r'$\Delta{\hat{\theta}}_{or_{f}}$', l_angle
                          ],
                          [
                              srd_ro, 'str_ro', r'$\Delta{\theta}_{or_{r}}$',
                              r'$\Delta{\hat{\theta}}_{or_{r}}$', l_angle
                          ],
                          [
                              srd_b, 'str_b', r'$\Delta{\theta}_{b}$',
                              r'$\Delta{\hat{\theta}}_{b}$', l_angle
                          ]])

    temp_tor = []
    for i in [2, 5, 10, 20]:
        fn = f'tortuosity_{i}'
        sn = f'tor{i}'
        sym = sup('tor', i)
        esym = sup(hat('tor'), i)
        u = '-'
        temp_tor.append([fn, sn, sym, esym, u])
    tor_ar = generate_entries(bases=temp_tor, types=['mean', 'std']).tolist()
    tor_ar.append(['tortuosity', 'tor', 'tor', hat('tor'), '-'])
    tor_ar = np.array(tor_ar)
    random_ar1 = generate_entries(bases=lin_ar[:-1, :].tolist(),
                                  types=['mean', 'std'])
    sc_random_ar1 = generate_entries(bases=random_ar1.tolist(), types=['scal'])

    srd_sc_random_ar1 = generate_entries(bases=sc_random_ar1.tolist(),
                                         types=['stride'])
    random_ar2 = generate_entries(bases=chunk_ar[:5, :].tolist(),
                                  types=['mean', 'std'])
    # sc_random_ar2 = generate_entries(bases=random_ar2[:4, :].tolist(), types=['scal'])
    random_ar3 = generate_entries(bases=ang_ar.tolist(), types=['mean', 'std'])
    random_ar4 = generate_entries(bases=orient_ar.tolist(),
                                  types=['mean', 'std'])
    random_ar5 = generate_entries(bases=sc_lin_ar.tolist(),
                                  types=['mean', 'std'])
    sc_chunk_ar = generate_entries(bases=random_ar5.tolist(), types=['stride'])
    par_ar = np.vstack([
        par_ar,
        ang_ar,
        lin_ar,
        sc_lin_ar,
        # sc_random_ar2,
        chunk_ar,
        random_ar2,
        dsp_ar,
        tor_ar,
        orient_ar,
        random_ar1,
        sc_random_ar1,
        sc_chunk_ar,
        srd_sc_random_ar1,
        random_ar3,
        random_ar4,
        random_ar5
    ])

    ind_col = 1
    sel_cols = [x for x in range(par_ar.shape[1]) if x != ind_col]
    par_db = pd.DataFrame(
        data=par_ar[:, sel_cols],
        index=par_ar[:, ind_col],
        columns=[c for i, c in enumerate(cols) if i != ind_col])
    par_db.index.name = cols[ind_col]

    par_db = par_db[~par_db.index.duplicated(keep='first')]
    # Refine auto-generated unit labels for selected parameters.
    par_db.loc['str_tr', 'unit'] = '% time crawling'
    par_db.loc['non_str_tr', 'unit'] = '% time not crawling'
    par_db.loc['pau_tr', 'unit'] = '% time pausing'
    par_db.loc['fee_tr', 'unit'] = '% time feeding'
    par_db.loc['tur_tr', 'unit'] = '% time turning'
    par_db.loc['Ltur_tr', 'unit'] = '% time turning left'
    par_db.loc['Rtur_tr', 'unit'] = '% time turning right'

    par_db.loc['cum_sd', 'unit'] = 'scaled pathlength'
    par_db.loc['cum_d', 'unit'] = 'pathlength $(mm)$'

    par_db.loc['b', 'unit'] = 'bend angle $(deg)$'
    par_db.loc['bv', 'unit'] = 'bending velocity $(deg/sec)$'
    par_db.loc['ba', 'unit'] = 'bending acceleration $(deg/sec^2)$'
    par_db.loc['fo', 'unit'] = 'orientation angle $(deg)$'
    par_db.loc['ro', 'unit'] = 'rear orientation angle $(deg)$'
    par_db.loc['fov', 'unit'] = 'orientation velocity $(deg/sec)$'
    par_db.loc['foa', 'unit'] = 'orientation acceleration $(deg/sec^2)$'
    par_db.loc['rov', 'unit'] = 'rear orientation velocity $(deg/sec)$'
    par_db.loc['roa', 'unit'] = 'rear orientation acceleration $(deg/sec^2)$'

    par_db.loc['str_fo', 'unit'] = r'$\Delta\theta_{or}$ over strides $(deg)$'
    par_db.loc['str_ro',
               'unit'] = r'$\Delta\theta_{or_{r}}$ over strides $(deg)$'
    par_db.loc['tur_fo', 'unit'] = r'$\Delta\theta_{or}$ over turns $(deg)$'
    par_db.loc['tur_ro',
               'unit'] = r'$\Delta\theta_{or_{r}}$ over turns $(deg)$'

    par_db.loc['fee_N', 'unit'] = '# feeding events'

    par_db.loc['sf_am'] = {
        'par': 'scaled_amount_eaten',
        'symbol': '${m^{*}}_{feed}$',
        'exp_symbol': r'${\hat{m^{*}}}_{feed}$',
        'unit': 'food intake as % larval mass',
        # 'collect' : None
    }

    par_db.loc['c_odor1'] = {
        'par': 'first_odor_concentration',
        'symbol': '${C}_{odor_{1}}$',
        'exp_symbol': r'${\hat{C}}_{odor_{1}}$',
        'unit': r'Concentration C(t), $\mu$M',
        # 'collect' : 'first_odor_concentration'
    }

    par_db.loc['dc_odor1'] = {
        'par': 'first_odor_concentration_change',
        'symbol': r'$\delta{C}_{odor_{1}}$',
        'exp_symbol': r'$\delta{\hat{C}}_{odor_{1}}$',
        'unit': 'Concentration change dC(t), $-$',
        # 'collect' : 'first_odor_concentration'
    }

    par_db.loc['A_olf'] = {
        'par': 'olfactory_activation',
        'symbol': '$A_{olf}$',
        'exp_symbol': r'$\hat{A}_{olf}$',
        'unit': 'Olfactory activation',
        # 'collect' : 'olfactory_activation'
    }

    par_db.loc['A_tur'] = {
        'par': 'turner_activation',
        'symbol': '$A_{tur}$',
        'exp_symbol': r'$\hat{A}_{tur}$',
        'unit': 'Turner activation',
        # 'collect' : 'turner_activation'
    }

    par_db.loc['Act_tur'] = {
        'par': 'turner_activity',
        'symbol': '$Act_{tur}$',
        'exp_symbol': r'$\hat{Act}_{tur}$',
        'unit': 'Turner activity',
        # 'collect' : 'ang_activity'
    }

    # Plotting limits for selected parameters.
    par_db['lim'] = None
    par_db.at['b', 'lim'] = [-180, 180]
    par_db.at['fo', 'lim'] = [0, 360]
    par_db.at['ro', 'lim'] = [0, 360]
    par_db.at['fov', 'lim'] = [-300, 300]
    par_db.at['rov', 'lim'] = [-300, 300]

    par_db.at['f_am', 'lim'] = [0.0, 10**-5]
    par_db.at['hunger', 'lim'] = [0.0, 1.0]
    par_db.at['puppation_buffer', 'lim'] = [0.0, 1.0]
    par_db.at['reserve_density', 'lim'] = [0.0, 2.0]
    par_db.at['deb_f', 'lim'] = [0.0, 2.0]

    par_db.at['c_odor1', 'lim'] = [0.0, 8.0]
    par_db.at['dc_odor1', 'lim'] = [-0.05, 0.05]
    par_db.at['A_olf', 'lim'] = [-1.0, 1.0]
    par_db.at['A_tur', 'lim'] = [10.0, 40.0]
    par_db.at['Act_tur', 'lim'] = [-20.0, 20.0]

    # Map each parameter to its data-collection key.
    par_db['collect'] = None
    for k, v in step_database.items():
        par_db.loc[par_db['par'] == k, 'collect'] = v

    # par_db['type'] = None
    # par_db['type'].loc['str0'] = bool
    # par_db['type'].loc['str1'] = bool

    par_db.to_csv(ParDb_path, index=True, header=True)
Example #2
def fit_crawl_params(d,
                     target_point=None,
                     fit_filepath=None,
                     save_to=None,
                     save_as='crawl_fit.pdf'):
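    """Compare the endpoint crawl metrics of dataset d (stride count, crawling
    time ratio, cumulative scaled distance) against reference normal fits
    loaded from fit_filepath, plot both distributions and return the KS
    statistics per parameter."""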
    if save_to is None:
        save_to = d.plot_dir
    filepath = os.path.join(save_to, save_as)
    e = d.endpoint_data
    if target_point is None:
        target_point = d.point
    point = d.point
    # exp_dst = nam.dst(d.point)
    exp_dst = 'distance'
    # dst = nam.dst(d.point)
    dst = 'distance'
    exp_cum_sdst = nam.cum(nam.scal(exp_dst))
    cum_sdst = nam.cum(nam.scal(dst))
    Nstrides = nam.num('stride')
    stride_ratio = nam.dur_ratio('stride')
    dispersion = nam.scal(nam.final('40sec_dispersion'))

    exp_pars = [Nstrides, stride_ratio, exp_cum_sdst]
    pars = [Nstrides, stride_ratio, cum_sdst]
    ranges = [(100, 300), (0.5, 1.0), (20, 80)]
    # print(pars)
    # print(exp_cum_sdst, cum_sdst)
    exp_pars, fitted_distros, target_stats = d.load_fits(
        filepath=fit_filepath, selected_pars=exp_pars)
    # print(exp_pars)
    fits = []

    labels = ['$N_{strides}$', 'crawling ratio', '$distance_{scal}$']
    xlabels = ['counts $(-)$', 'time ratio $(-)$', 'scal distance $(-)$']
    colors = ['r', 'c', 'g']
    nbins = 20
    height = 0.3
    fig, axs = plt.subplots(int(len(pars) / 3),
                            3,
                            figsize=(15, int(5 * len(pars) / 3)),
                            sharey=True)
    axs = axs.ravel()
    for i, (par, lab, xl, (rmin, rmax), w, target, c) in enumerate(
            zip(pars, labels, xlabels, ranges, fitted_distros, target_stats,
                colors)):
        # print(par)
        data = e[par].dropna().values
        # rmin, rmax=np.min(data), np.max(data)
        x = np.linspace(rmin, rmax, nbins)
        # f = Fitter(data)
        # f.distributions = ['norm']
        # f.timeout = 200
        # f.fit()
        # w = f.get_best()
        # print(w)
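        # Each stored fit is {distribution_name: (loc, scale)}; test the data
        # against that reference distribution.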
        loc, scale = list(w.values())[0]
        stat, pvalue = stats.kstest(data,
                                    list(w.keys())[0],
                                    args=list(w.values())[0])
        fits.append([par, stat, pvalue])
        print(
            f'Parameter {par} was fitted with stat : {stat} vs target stat : {target}'
        )
        y = norm.rvs(size=10000, loc=loc, scale=scale)
        n_weights = np.ones_like(y) / float(len(y))
        my_n, my_bins, my_patches = axs[i].hist(y,
                                                bins=x,
                                                weights=n_weights,
                                                alpha=0)
        axs[i].scatter(my_bins[:-1] + 0.5 * (my_bins[1:] - my_bins[:-1]),
                       my_n,
                       marker='o',
                       c='k',
                       s=40,
                       alpha=0.6)
        axs[i].plot(my_bins[:-1] + 0.5 * (my_bins[1:] - my_bins[:-1]),
                    my_n,
                    alpha=0.6,
                    c='k',
                    linewidth=2)

        weights = np.ones_like(data) / float(len(data))
        axs[i].hist(data,
                    bins=x,
                    weights=weights,
                    label=lab,
                    color=c,
                    alpha=0.6)
        axs[i].legend(loc='upper right', fontsize=12)
        axs[i].set_xlabel(xl, fontsize=12)
        axs[i].set_ylim([0, height])
    axs[0].set_ylabel('probability, $P$', fontsize=15)

    plt.subplots_adjust(left=0.05,
                        bottom=0.1,
                        right=0.99,
                        top=0.95,
                        wspace=0.01,
                        hspace=0.3)
    plt.savefig(filepath, dpi=300)
    print(f'Image saved as {filepath} !')
    return fits
Example #3
def fit_endpoint_params(d,
                        fit_filepath=None,
                        save_to=None,
                        save_as='endpoint_fit.pdf'):
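    """Compare a fixed set of endpoint parameters (body length, stride metrics,
    dispersion, bend statistics) against reference normal fits, plot the
    distributions and return the KS statistics per parameter."""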
    if save_to is None:
        save_to = d.plot_dir
    filepath = os.path.join(save_to, save_as)
    e = d.endpoint_data
    # TURNER PARAMETERS
    # -----------------------

    # The bend of the front-body (sum of half the angles) is the target for the turner calibration
    # The head reorientation velocity is the result of the turner calibration
    # if mode == 'experiment':
    # point = d.critical_spinepoint

    # act_r = 'non_rest_dur_fraction'
    #
    # elif mode == 'simulation':
    # point = 'centroid'
    point = d.point
    dst = 'distance'
    # dst = d.dst_param(point)
    v = 'velocity'
    # v = d.vel_param(point)
    a = 'acceleration'
    # a = d.acc_param(point)
    sv = nam.scal(v)
    sa = nam.scal(a)
    cum_dst = nam.cum(dst)
    cum_sdst = nam.cum(nam.scal(dst))

    # Stride related parameters. The scal-distance-per-stride is the result of the crawler calibration
    stride_flag = nam.max(sv)
    stride_d = nam.dst('stride')
    stride_sd = nam.scal(stride_d)
    f_crawler = nam.freq(sv)
    Nstrides = nam.num('stride')
    stride_ratio = nam.dur_ratio('stride')
    l = 'length'

    pars = [
        l, f_crawler,
        nam.mean(stride_d),
        nam.mean(stride_sd), Nstrides, stride_ratio, cum_dst, cum_sdst,
        nam.max('40sec_dispersion'),
        nam.scal(nam.max('40sec_dispersion')),
        nam.final('40sec_dispersion'),
        nam.scal(nam.final('40sec_dispersion')), 'stride_reoccurence_rate',
        'stride_reoccurence_rate',
        nam.mean('bend'),
        nam.mean(nam.vel('bend'))
    ]
    ranges = [(2, 6), (0.6, 2.25), (0.4, 1.6), (0.1, 0.35), (100, 300),
              (0.3, 1.0), (0, 360), (0, 80), (0, 70), (0, 20), (0, 70),
              (0, 20), (0.5, 1.0), (0.5, 1.0), (-20.0, 20.0), (-8.0, 8.0)]
    # print(pars)

    if fit_filepath is None:
        # These are the fits of a 100 larvae dataset
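        # Only the first 8 parameters have reference fits here; zip() below
        # stops at the shortest input, so the remaining parameters are skipped.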
        fitted_distros = [
            {
                'norm': (4.57, 0.54)
            },
            {
                'norm': (1.44, 0.203)
            },
            {
                'norm': (1.081, 0.128)
            },
            {
                'norm': (0.239, 0.031)
            },
            {
                'norm': (249.8, 36.4)
            },
            {
                'norm': (55.1, 7.9)
            },
            {
                'norm': (43.3, 16.2)
            },
            {
                'norm': (9.65, 3.79)
            },
        ]
        target_stats = [0.074, 0.087, 0.086, 0.084, 0.057, 0.048, 0.068, 0.094]
    else:
        pars, fitted_distros, target_stats = d.load_fits(filepath=fit_filepath,
                                                         selected_pars=pars)
        # print(pars)
    fits = []

    labels = [
        'body length', 'stride frequency', 'stride displacement',
        'scal stride displacement', 'num strides', 'crawling ratio',
        'displacement in 3 min', 'scal displacement in 3 min',
        'max dispersion in 40 sec', 'max scal dispersion in 40 sec',
        'dispersion in 40 sec', 'scal dispersion in 40 sec',
        'stride reoccurence rate', 'stride reoccurence rate',
        'mean bend angle', 'mean bend velocity'
    ]
    xlabels = [
        'length $(mm)$', 'frequency $(Hz)$', 'distance $(mm)$',
        'scal distance $(-)$', 'counts $(-)$', 'time ratio $(-)$',
        'distance $(mm)$', 'scal distance $(-)$', 'distance $(mm)$',
        'scal distance $(-)$', 'distance $(mm)$', 'scal distance $(-)$',
        'rate $(-)$', 'rate $(-)$', 'angle $(deg)$',
        'angular velocity $(deg/sec)$'
    ]
    nbins = 20
    height = 0.3
    fig, axs = plt.subplots(int(len(pars) / 2),
                            2,
                            figsize=(15, int(5 * len(pars) / 2)),
                            sharey=True)
    axs = axs.ravel()
    for i, (par, lab, xl, (rmin, rmax), w, target) in enumerate(
            zip(pars, labels, xlabels, ranges, fitted_distros, target_stats)):
        # print(par)
        data = e[par].dropna().values
        # rmin, rmax=np.min(data), np.max(data)
        x = np.linspace(rmin, rmax, nbins)
        # f = Fitter(data)
        # f.distributions = ['norm']
        # f.timeout = 200
        # f.fit()
        # w = f.get_best()
        # print(w)
        loc, scale = list(w.values())[0]
        stat, pvalue = stats.kstest(data,
                                    list(w.keys())[0],
                                    args=list(w.values())[0])
        fits.append([par, stat, pvalue])
        print(
            f'Parameter {par} was fitted with stat : {stat} vs target stat : {target}'
        )
        y = norm.rvs(size=10000, loc=loc, scale=scale)
        n_weights = np.ones_like(y) / float(len(y))
        my_n, my_bins, my_patches = axs[i].hist(y,
                                                bins=x,
                                                weights=n_weights,
                                                alpha=0)
        axs[i].scatter(my_bins[:-1] + 0.5 * (my_bins[1:] - my_bins[:-1]),
                       my_n,
                       marker='o',
                       c='k',
                       s=40,
                       alpha=0.6)
        axs[i].plot(my_bins[:-1] + 0.5 * (my_bins[1:] - my_bins[:-1]),
                    my_n,
                    alpha=0.6,
                    c='k',
                    linewidth=2,
                    label='norm fit')

        weights = np.ones_like(data) / float(len(data))
        axs[i].hist(data,
                    bins=x,
                    weights=weights,
                    label=lab,
                    color='b',
                    alpha=0.6)
        axs[i].legend(loc='upper right', fontsize=12)
        axs[i].set_xlabel(xl, fontsize=12)
        axs[i].set_ylim([0, height])
    axs[0].set_ylabel('probability, $P$', fontsize=15)
    axs[2].set_ylabel('probability, $P$', fontsize=15)
    axs[4].set_ylabel('probability, $P$', fontsize=15)

    plt.subplots_adjust(left=None,
                        bottom=None,
                        right=None,
                        top=None,
                        wspace=0.01,
                        hspace=0.3)
    plt.savefig(filepath, dpi=300)
    print(f'Image saved as {filepath} !')
    return fits
Example #4
def fit_bout_params(d,
                    fit_filepath=None,
                    save_to=None,
                    save_as='bout_fit.pdf'):
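    """Compare bout-related parameters (stride chains, stride-free bouts, rest
    and activity bouts) against reference fits loaded from fit_filepath, plot
    the distributions and return the KS statistics per parameter."""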
    if save_to is None:
        save_to = os.path.join(d.plot_dir, 'plot_bouts')
    if not os.path.exists(save_to):
        os.makedirs(save_to)
    filepath = os.path.join(save_to, save_as)
    e = d.endpoint_data
    s = d.step_data

    pars = [
        nam.chain_counts_par('stride'),
        nam.dur(nam.non('stride')),
        nam.dur_ratio('stride'),
        nam.dur_ratio('non_stride'),
        nam.num('stride'),
        nam.num('non_stride'),
        nam.dur('rest'),
        nam.dur('activity'),
        nam.dur_ratio('rest'),
        nam.dur_ratio('activity'),
        nam.num('rest'),
        nam.num('activity')
    ]
    ranges = [(1.0, 40.0), (0.0, 10.0), (0.0, 1.0), (0, 1.0), (0, 300),
              (0, 120), (0, 3), (0, 60), (0.0, 1.0), (0.0, 1.0), (0, 100),
              (0, 100)]
    # print(pars)

    if fit_filepath is None:
        # These are the fits of a 100 larvae dataset
        raise ValueError('Not implemented. Please provide fit file')
    else:
        pars, fitted_distros, target_stats = d.load_fits(filepath=fit_filepath,
                                                         selected_pars=pars)
        # print(pars)
    fits = []

    labels = [
        'stride chains', 'stride-free bouts', 'stride ratio',
        'stride-free ratio', 'num strides', 'num non-strides', 'rest bouts',
        'activity bouts', 'rest ratio', 'activity ratio', 'num rests',
        'num activities'
    ]
    xlabels = [
        'length $(-)$', 'time $(sec)$', 'time fraction $(-)$',
        'time fraction $(-)$', 'counts $(-)$', 'counts $(-)$', 'time $(sec)$',
        'time $(sec)$', 'time fraction $(-)$', 'time fraction $(-)$',
        'counts $(-)$', 'counts $(-)$'
    ]
    nbins = 30
    height = 0.4
    fig, axs = plt.subplots(6, 2, figsize=(15, 30), sharey=True)
    axs = axs.ravel()
    for i, (par, lab, xl, (rmin, rmax), w, target) in enumerate(
            zip(pars, labels, xlabels, ranges, fitted_distros, target_stats)):
        print(par)
        try:
            data = e[par].dropna().values
        except KeyError:
            # Parameter not in the endpoint data; fall back to the step data.
            data = s[par].dropna().values
        # rmin, rmax=np.min(data), np.max(data)
        x = np.linspace(rmin, rmax, nbins)
        # f = Fitter(data)
        # f.distributions = ['norm']
        # f.timeout = 200
        # f.fit()
        # w = f.get_best()
        # print(w)
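        # Each stored fit is {distribution_name: args}; the distribution is
        # looked up by name in scipy.stats to draw the reference sample.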
        args = list(w.values())[0]
        name = list(w.keys())[0]
        stat, pvalue = stats.kstest(data, name, args=args)
        fits.append([par, stat, pvalue])
        print(
            f'Parameter {par} was fitted with stat : {stat} vs target stat : {target}'
        )
        distr = getattr(stats.distributions, name)
        y = distr.rvs(*args, size=10000)
        n_weights = np.ones_like(y) / float(len(y))
        my_n, my_bins, my_patches = axs[i].hist(y,
                                                bins=x,
                                                weights=n_weights,
                                                alpha=0)
        axs[i].scatter(my_bins[:-1] + 0.5 * (my_bins[1:] - my_bins[:-1]),
                       my_n,
                       marker='o',
                       c='k',
                       s=40,
                       alpha=0.6)
        axs[i].plot(my_bins[:-1] + 0.5 * (my_bins[1:] - my_bins[:-1]),
                    my_n,
                    alpha=0.6,
                    c='k',
                    linewidth=2,
                    label=f'{name} fit')

        weights = np.ones_like(data) / float(len(data))
        axs[i].hist(data,
                    bins=x,
                    weights=weights,
                    label=lab,
                    color='b',
                    alpha=0.6)
        axs[i].legend(loc='upper right', fontsize=12)
        axs[i].set_xlabel(xl, fontsize=12)
        axs[i].set_ylim([0, height])
    axs[0].set_ylabel('probability, $P$', fontsize=15)
    axs[2].set_ylabel('probability, $P$', fontsize=15)
    axs[4].set_ylabel('probability, $P$', fontsize=15)

    plt.subplots_adjust(left=None,
                        bottom=None,
                        right=None,
                        top=None,
                        wspace=0.01,
                        hspace=0.3)
    plt.savefig(filepath, dpi=300)
    print(f'Image saved as {filepath} !')
    return fits
Example #5
bv, fov, rov = nam.vel([b, fo, ro])
ba, foa, roa = nam.acc([b, fo, ro])
fou, rou = nam.unwrap([fo, ro])
d, v, a = 'dst', 'vel', 'acc'
sd, sv, sa = nam.scal([d, v, a])
ld, lv, la = nam.lin([d, v, a])
sld, slv, sla = nam.scal([ld, lv, la])
std = nam.straight_dst(d)
sstd = nam.scal(std)
fv, fsv = nam.freq([v, sv])
cum_d, cum_sd = nam.cum([d, sd])

# 'srd' rather than 'str' avoids shadowing the built-in.
srd, pau, tur, fee = 'stride', 'pause', 'turn', 'feed'
chunks = [srd, pau, tur, fee]
str_t, pau_t, tur_t, fee_t = nam.dur(chunks)
str_tr, pau_tr, tur_tr, fee_tr = nam.dur_ratio(chunks)
str_N, pau_N, tur_N, fee_N = nam.num(chunks)
str_d = nam.dst(srd)
str_sd = nam.scal(str_d)
dsp = 'dispersion'
dsp40 = f'40sec_{dsp}'
sdsp, sdsp40 = nam.scal([dsp, dsp40])
f_dsp, f_dsp40, f_sdsp, f_sdsp40 = nam.final([dsp, dsp40, sdsp, sdsp40])
mu_dsp, mu_dsp40, mu_sdsp, mu_sdsp40 = nam.mean([dsp, dsp40, sdsp, sdsp40])
max_dsp, max_dsp40, max_sdsp, max_sdsp40 = nam.max([dsp, dsp40, sdsp, sdsp40])
str_fo, str_ro, str_b = nam.chunk_track(srd, [fou, rou, b])

l_angle = 'angle $(deg)$'
l_angvel = 'angular velocity $(deg/sec)$'
l_angacc = 'angular acceleration $(deg/sec^2)$'
l_time = 'time $(sec)$'