Example #1
import logging

import nems.db as nd

log = logging.getLogger(__name__)


def run_decoding_analysis(IsReload=False, **kwargs):
    """
    Specialized postprocessor to queue decoding analysis on the model prediction data.
    """
    raise DeprecationWarning("Use 'run_decoding'. It is cleaner.")  # hard deprecation: the code below never runs
    if IsReload:
        log.info("Reload, skipping rda")
        return {}
    modelname = kwargs['meta']['modelname']
    # parse movement keyword options from the modelname
    # (an option like 'mvm.t25.w1' sets threshold=25, window=1)
    threshold = 25
    window = 1
    ops = modelname.split('-')
    for o in ops:
        if o.startswith('mvm'):
            parms = o.split('.')
            for p in parms:
                if p.startswith('t'):
                    threshold = int(p[1:])
                elif p.startswith('w'):
                    window = int(p[1:])

    # "base dprime" analysis
    mn = f'dprime_mvm-{threshold}-{window}_jk10_zscore_nclvz_fixtdr2-fa'

    # noise dims
    noise = [-1, 0, 1, 2, 3, 4, 5, 6]
    modellist = []
    for n in noise:
        if n > 0:
            modellist.append(mn + f'_noiseDim-{n}')
        elif n == -1:
            modellist.append(mn + f'_noiseDim-dU')
        else:
            modellist.append(mn)

    # append lv modelname
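    # (each final entry looks like, e.g., 'dprime_mvm-25-1_jk10_zscore_nclvz_fixtdr2-fa_noiseDim-2_model-LV-<modelname>')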
    modellist = [m + f'_model-LV-{modelname}' for m in modellist]

    script = '/auto/users/hellerc/code/projects/nat_pupil_ms/dprime_new/cache_dprime.py'
    python_path = '/auto/users/hellerc/anaconda3/envs/lbhb/bin/python'

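    # queue one decoding job per modelname, using the first 7 characters of the cellid (the site id)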
    nd.enqueue_models(celllist=[kwargs['meta']['cellid'][:7]],
                      batch=kwargs['meta']['batch'],
                      modellist=modellist,
                      executable_path=python_path,
                      script_path=script,
                      user='******',
                      force_rerun=True,
                      reserve_gb=2)
    log.info('Queued decoding analysis')
    return {}
Example #2
def second_fit_pop_models(batch, start_from=None, test_count=None):
    all_cellids = nd.get_batch_cells(batch, as_list=True)
    if batch == 322:
        sites = NAT4_A1_SITES
    else:
        sites = NAT4_PEG_SITES
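    # keep only the cells whose cellid starts with one of the selected site prefixes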
    cellids = [
        c for c in all_cellids
        if np.any([c.startswith(s.split('.')[0]) for s in sites])
    ]

    modelnames = []
    for k, v in MODELGROUPS.items():
        if ('_single' not in k) and ('_exploration' not in k) and (k != 'LN'):
            modelnames.extend(v)
    iterator = cellids

    for siteid in iterator:
        for modelname in modelnames[start_from:test_count]:
            do_fit = True
            if not FORCE_RERUN:
                d = nd.pd_query(
                    "SELECT * FROM Results WHERE cellid like %s and modelname=%s and batch=%s",
                    params=(siteid + "%", modelname, batch))
                if len(d) > 0:
                    do_fit = False
                    print(f'Fit exists for {siteid} {batch} {modelname}')
            if do_fit:
                nd.enqueue_models(
                    celllist=[siteid],
                    batch=batch,
                    modellist=[modelname],
                    user="******",
                    #executable_path='/auto/users/jacob/bin/anaconda3/envs/jacob_nems/bin/python',
                    executable_path='/auto/users/svd/bin/miniconda3/envs/tf/bin/python',
                    script_path='/auto/users/jacob/bin/anaconda3/envs/jacob_nems/nems/scripts/fit_single.py'
                )

    return modelnames
Example #3
def enqueue_models_view():
    """Call modelfit.enqueue_models with user selections as args."""

    user = get_current_user()

    # Only pull the numerals from the batch string, leave off the description.
    bSelected = request.args.get('bSelected')[:3]
    cSelected = request.args.getlist('cSelected[]')
    mSelected = request.args.getlist('mSelected[]')
    codeHash = request.args.get('codeHash')
    execPath = request.args.get('execPath')
    scriptPath = request.args.get('scriptPath')
    force_rerun = request.args.get('forceRerun', type=int)
    useKamiak = request.args.get('useKamiak', type=int)
    kamiakFunction = request.args.get('kamiakFunction')  # fn to generate scripts
    kamiakPath = request.args.get('kamiakPath')  # path to store output in
    loadKamiak = request.args.get('loadKamiak', type=int)  # check to load results
    kamiakResults = request.args.get('kamiakResults')  # path to results
    useGPU = request.args.get('useGPU', type=int)  # run as a GPU job
    useExacloud = request.args.get('useExacloud', type=int)
    exaOHSU = request.args.get('exaOHSU')
    exaExec = request.args.get('exaExec')
    exaScript = request.args.get('exaScript')
    exaLimit = request.args.get('exaLimit')
    exaExclude = request.args.get('exaExclude')
    exaHighMem = request.args.get('exaHighMem', type=int)

    if loadKamiak:
        kamiak_to_database(cSelected, bSelected, mSelected, kamiakResults,
                           execPath, scriptPath)
        return jsonify(data=True)

    elif useExacloud:
        log.info('Starting exacloud jobs!')
        enqueue_exacloud_models(cellist=cSelected,
                                batch=bSelected,
                                modellist=mSelected,
                                user=user.username,
                                linux_user=exaOHSU,
                                executable_path=exaExec,
                                script_path=exaScript,
                                time_limit=exaLimit,
                                useGPU=useGPU,
                                high_mem=exaHighMem,
                                exclude=exaExclude)
        return jsonify(data=True)

    elif useKamiak:
        # kamiakFunction should be a stringified pointer to a function
        # that takes a list of cellids, a batch, a list of modelnames,
        # and a directory where the output should be stored.
        # Ex: kamiakFunction = 'nems_lbhb.utils.my_kamiak_function'
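        # A minimal sketch (illustrative only, not from this codebase) of what such a
        # function is expected to look like, based on the description above:
        #
        #     def my_kamiak_function(cellids, batch, modelnames, output_path):
        #         """Generate one Kamiak job script per cellid/modelname into output_path."""
        #         ...
        #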
        try:
            kamiak_script = _lookup_fn_at(kamiakFunction, ignore_table=True)
            kamiak_script(cSelected, bSelected, mSelected, kamiakPath)
            return jsonify(data=True)
        except AttributeError:
            log.warning('kamiakFunction does not exist or is improperly defined')
            return jsonify(data=False)
    else:
        if not codeHash:
            codeHash = 'master'
        if not execPath:
            execPath = None
        if not scriptPath:
            scriptPath = None

        enqueue_models(cSelected,
                       bSelected,
                       mSelected,
                       force_rerun=bool(force_rerun),
                       user=user.username,
                       codeHash=codeHash,
                       executable_path=execPath,
                       script_path=scriptPath,
                       GPU_job=useGPU)

        return jsonify(data=True)
Example #4
modellist = np.concatenate([[m + f'_model-LV-{lvstr}' for lvstr in lvmodelnames] for m in modellist]).tolist()
if batch == 331:
    modellist = [m.replace('loadpred', loadpredkey) for m in modellist]

if not zscore:
    modellist = [m.replace('_zscore', '') for m in modellist]

if exclude_lowFR:
    modellist = [m+f'_rmlowFR-{thresh}' for m in modellist]

if movement_mask is not False:
    modellist = [m.replace('dprime_', f'dprime_mvm-{movement_mask[0]}-{movement_mask[1]}_') for m in modellist]

if use_old_cpn:
    modellist = [m.replace('dprime_', 'dprime_oldCPN_') for m in modellist]

modellist = [m for m in modellist if '_pr' not in m]

script = '/auto/users/hellerc/code/projects/nat_pupil_ms/dprime_new/cache_dprime.py'
python_path = '/auto/users/hellerc/anaconda3/envs/lbhb/bin/python'

nd.enqueue_models(celllist=sites,
                  batch=batch,
                  modellist=modellist,
                  executable_path=python_path,
                  script_path=script,
                  user='******',
                  force_rerun=force_rerun,
                  reserve_gb=4)
Example #5
if exacloud:
    from nems_lbhb.exacloud.queue_exacloud_job import enqueue_exacloud_models
    #force_rerun=True
    lbhb_user = "******"
    # exacloud settings:
    executable_path = '/home/users/davids/anaconda3/envs/nems/bin/python'
    script_path = '/home/users/davids/nems/scripts/fit_single.py'
    ssh_key = '/home/svd/.ssh/id_rsa'
    user = "******"

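    # queue these fits on the exacloud cluster with the settings above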
    enqueue_exacloud_models(cellist=sites,
                            batch=batch,
                            modellist=modelnames,
                            user=lbhb_user,
                            linux_user=user,
                            force_rerun=force_rerun,
                            executable_path=executable_path,
                            script_path=script_path,
                            useGPU=True)

else:
    script = '/auto/users/hellerc/code/NEMS/scripts/fit_single.py'
    python_path = '/auto/users/hellerc/anaconda3/envs/tf/bin/python'
    nd.enqueue_models(celllist=sites,
                      batch=batch,
                      modellist=modelnames,
                      executable_path=python_path,
                      script_path=script,
                      user='******',
                      force_rerun=force_rerun,
                      GPU_job=0)
Example #6
modellist = [
    'psth.fs20.pup-ld-st.pup0.afl0-ref-psthfr.s_stategain.S_jk.nf20-basic',
    'psth.fs20.pup-ld-st.pup.afl.pxf-ref-psthfr.s_stategain.S_jk.nf20-basic',
    'psth.fs20.pup-ld-st.pup0.afl.pxf-ref-psthfr.s_stategain.S_jk.nf20-basic',
    'psth.fs20.pup-ld-st.pup.afl0.pxf-ref-psthfr.s_stategain.S_jk.nf20-basic',
    'psth.fs20.pup-ld-st.pup.afl.pxf0-ref-psthfr.s_stategain.S_jk.nf20-basic',
    'psth.fs20.pup-ld-st.pup0.afl0.pxf-ref-psthfr.s_stategain.S_jk.nf20-basic',
    'psth.fs20.pup-ld-st.pup.afl0.pxf0-ref-psthfr.s_stategain.S_jk.nf20-basic',
    'psth.fs20.pup-ld-st.pup0.afl.pxf0-ref-psthfr.s_stategain.S_jk.nf20-basic',
    'psth.fs20.pup-ld-st.pup0.afl0.pxf0-ref-psthfr.s_stategain.S_jk.nf20-basic',
    'psth.fs20.pup-ld-st.pup.fil-ref-psthfr.s_sdexp.S_jk.nf20-basic',
    'psth.fs20.pup-ld-st.pup0.fil-ref-psthfr.s_sdexp.S_jk.nf20-basic',
    'psth.fs20.pup-ld-st.pup.fil0-ref-psthfr.s_sdexp.S_jk.nf20-basic',
    'psth.fs20.pup-ld-st.pup0.fil0-ref-psthfr.s_sdexp.S_jk.nf20-basic',
]

batch = 309
force_rerun = True
cells = nd.get_batch_cells(batch).cellid.tolist()
script = '/auto/users/hellerc/code/NEMS/scripts/fit_single.py'
executable = '/auto/users/hellerc/anaconda3/envs/crh_nems/bin/python'

nd.enqueue_models(celllist=cells,
                  modellist=modellist,
                  batch=batch,
                  force_rerun=force_rerun,
                  script_path=script,
                  executable_path=executable,
                  reserve_gb=1,
                  user='******')

Example #7
]
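# three variants of the same model architecture, differing only in the gtgram loader option (.mono, default, .bin)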
modelnames = [
    "gtgram.fs100.ch18.mono-ld.pop-norm.l1-sev_wc.Nx50-fir.1x25x50-wc.50xR-lvl.R-dexp.R_tfinit.n.lr1e3.et3.es20-newtf.n.lr1e4.l2:4",
    "gtgram.fs100.ch18-ld.pop-norm.l1-sev_wc.Nx50-fir.1x25x50-wc.50xR-lvl.R-dexp.R_tfinit.n.lr1e3.et3.es20-newtf.n.lr1e4.l2:4",
    "gtgram.fs100.ch18.bin-ld.pop-norm.l1-sev_wc.Nx50-fir.1x25x50-wc.50xR-lvl.R-dexp.R_tfinit.n.lr1e3.et3.es20-newtf.n.lr1e4.l2:4"
]

force_rerun = False
run_in_lbhb = False

if run_in_lbhb:
    # run these first models locally so that the recordings get generated.
    r = db.enqueue_models(siteids,
                          batch,
                          modelnames,
                          executable_path=executable_path,
                          script_path=script_path,
                          GPU_job=GPU_job,
                          user="******")
    for a, b in r:
        print(a, b)
else:
    # exacloud

    # exacloud queue settings:
    exa_executable_path = '/home/users/davids/anaconda3/envs/nems/bin/python'
    exa_script_path = '/home/users/davids/nems/scripts/fit_single.py'
    ssh_key = '/home/svd/.ssh/id_rsa'
    user = "******"
    lbhb_user = "******"
Example #8
batch_cells = set(nd.get_batch_cells(batch=batch).cellid)
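# all existing fit results for this batch (one row per fitted cellid/modelname)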
full_analysis = nd.get_results_file(batch)
already_analyzed = full_analysis.cellid.unique().tolist()
# batch_cells = ['BRT037b-39-1'] # best cell


# iterates over every model, checks which cells have not been fitted with it, and queues the fit command.
for model in modelnames:
    ff_model = full_analysis.modelname == model
    already_fitted_cells = set(full_analysis.loc[ff_model, 'cellid'])

    cells_to_fit = list(batch_cells.difference(already_fitted_cells))

    print('model {}, cells to fit:\n{}'.format(model, cells_to_fit))

    out = nd.enqueue_models(celllist=cells_to_fit, batch=batch, modellist=[model], user='******', force_rerun=force_rerun,
                            executable_path=executable_path, script_path=script_path)

    for oo in out:
        print(oo)



DB_pull = False
if DB_pull:
    results_table = nd.get_results_file(batch, cellids=list(batch_cells))
    preds = []
    for cell in batch_cells:
        print(cell)
        p = results_table[results_table['cellid'] == cell]['modelpath'].values[0]
        if os.path.isdir(p):
            xfspec, ctx = xforms.load_analysis(p)