Example #1
0
 def init_db(self):
     """Connect to the database and cache the configured table columns.

     Sets ``self.engine``, ``self.columns`` (one attribute per name in
     ``self.column_names``) and ``self.session``.
     """
     self.engine = db.Engine()
     target_table = db.Tables()[self.table_name]
     resolved = []
     for name in self.column_names:
         resolved.append(getattr(target_table, name))
     self.columns = resolved
     session_factory = db.sessionmaker(bind=self.engine)
     self.session = session_factory()
Example #2
0
def LBHB_Tables():
    """Reflect the LBHB database and return its commonly used ORM classes.

    Returns a dict mapping table name to the automapped SQLAlchemy class
    for gCellMaster, gSingleCell and sCellFile.
    """
    engine = db.Engine()
    base = automap_base()
    base.prepare(engine, reflect=True)
    mapped = base.classes
    return {
        'gCellMaster': mapped.gCellMaster,
        'gSingleCell': mapped.gSingleCell,
        'sCellFile': mapped.sCellFile,
    }
Example #3
0
def kamiak_to_database(cellids,
                       batch,
                       modelnames,
                       source_path,
                       executable_path=None,
                       script_path=None):
    """Import model fits produced on the Kamiak cluster into the local DB.

    For every (cellid, batch, modelname) combination, loads the saved
    analysis from ``source_path``, re-saves it through ``xforms`` so that
    figures/logs are registered locally, refreshes the Results table, and
    then marks (or inserts) the matching tQueue row as complete.

    :param cellids: iterable of cell IDs that were fit on Kamiak.
    :param batch: batch number the cells belong to.
    :param modelnames: iterable of model names that were fit.
    :param source_path: root directory of the copied fits, laid out as
        ``source_path/batch/cellid/modelname``.
    :param executable_path: executable recorded in new tQueue rows; the
        DEFAULT_EXEC_PATH setting is used when None/'None'/'NONE'/''.
    :param script_path: script recorded in new tQueue rows; the
        DEFAULT_SCRIPT_PATH setting is used when None/'None'/'NONE'/''.
    """

    # Fixed bookkeeping values recorded for every imported tQueue entry.
    user = '******'
    linux_user = '******'
    allowqueuemaster = 1
    waitid = 0
    parmstring = ''
    rundataid = 0
    priority = 1
    reserve_gb = 0
    codeHash = 'kamiak'

    # Treat the strings 'None'/'NONE'/'' the same as a missing path.
    if executable_path in [None, 'None', 'NONE', '']:
        executable_path = get_setting('DEFAULT_EXEC_PATH')
    if script_path in [None, 'None', 'NONE', '']:
        script_path = get_setting('DEFAULT_SCRIPT_PATH')

    # One job per (cellid, batch, modelname) combination; notes and command
    # prompts are built in lockstep with `combined`.
    combined = [(c, b, m)
                for c, b, m in itertools.product(cellids, [batch], modelnames)]
    notes = ['%s/%s/%s' % (c, b, m) for c, b, m in combined]
    commandPrompts = [
        "%s %s %s %s %s" % (executable_path, script_path, c, b, m)
        for c, b, m in combined
    ]

    engine = nd.Engine()
    for (c, b, m), note, commandPrompt in zip(combined, notes, commandPrompts):
        path = os.path.join(source_path, batch, c, m)
        if not os.path.exists(path):
            # Fit was never copied back from Kamiak -- skip this job.
            log.warning("missing fit for: \n%s\n%s\n%s\n"
                        "using path: %s\n", batch, c, m, path)
            continue
        else:
            # Re-save the analysis locally (without evaluating the model)
            # and refresh the Results table entry.
            xfspec, ctx = xforms.load_analysis(path, eval_model=False)
            preview = ctx['modelspec'].meta.get('figurefile', None)
            if 'log' not in ctx:
                ctx['log'] = 'missing log'
            figures_to_load = ctx['figures_to_load']
            figures = [xforms.load_resource(f) for f in figures_to_load]
            ctx['figures'] = figures
            xforms.save_analysis(None, None, ctx['modelspec'], xfspec,
                                 ctx['figures'], ctx['log'])
            nd.update_results_table(ctx['modelspec'], preview=preview)

        conn = engine.connect()
        # NOTE(review): SQL is built by string concatenation/format below; a
        # note or modelname containing quotes would break (or inject into)
        # the query. Consider bound parameters.
        sql = 'SELECT * FROM tQueue WHERE note="' + note + '"'
        r = conn.execute(sql)
        if r.rowcount > 0:
            # existing job, figure out what to do with it
            x = r.fetchone()
            queueid = x['id']
            complete = x['complete']

            if complete == 1:
                # Do nothing - the queue already shows a complete job
                pass

            elif complete == 2:
                # Change dead to complete
                sql = "UPDATE tQueue SET complete=1, killnow=0 WHERE id={}".format(
                    queueid)
                r = conn.execute(sql)

            else:
                # complete in [-1, 0] -- already running or queued
                # Do nothing
                pass

        else:
            # New job: insert a tQueue row already marked complete (=1),
            # since the fit itself was finished on Kamiak.
            sql = "INSERT INTO tQueue (rundataid,progname,priority," +\
                   "reserve_gb,parmstring,allowqueuemaster,user," +\
                   "linux_user,note,waitid,codehash,queuedate,complete) VALUES"+\
                   " ({},'{}',{}," +\
                   "{},'{}',{},'{}'," +\
                   "'{}','{}',{},'{}',NOW(),1)"

            sql = sql.format(rundataid, commandPrompt, priority, reserve_gb,
                             parmstring, allowqueuemaster, user, linux_user,
                             note, waitid, codeHash)
            r = conn.execute(sql)

        conn.close()
Example #4
0
def enqueue_exacloud_models(cellist,
                            batch,
                            modellist,
                            user,
                            linux_user,
                            executable_path,
                            script_path,
                            priority=1,
                            time_limit=14,
                            reserve_gb=0,
                            useGPU=False,
                            high_mem=False,
                            exclude=None,
                            force_rerun=False):
    """Enqueue models similarly to nems.db.enqueue_models, except on the Exacloud cluster at ACC.

    :param cellist: List of cells to include in analysis.
    :param batch: Batch number cells originate from.
    :param modellist: List of models to run.
    :param user: Username of the one starting the job.
    :param linux_user: OHSU username.
    :param executable_path: Executable used to run script.
    :param script_path: Script to run.
    :param priority: tQueue priority assigned to each new/reset job.
    :param time_limit: Max hours the job will run for. Jobs will be terminated if not complete by the end of the time limit.
    :param reserve_gb: Max GB required for the job. Job will fail if memory use goes above this level.
    :param reserve_gb: Max GB required for the job.
    :param useGPU: Whether or not to be a GPU job.
    :param high_mem: Whether or not GPU should be a higher memory one.
    :param exclude: List of nodes to exclude. Comma separated values, no spaces.
    :param force_rerun: If True, re-queue jobs even when a completed fit or
        finished/dead queue entry already exists.
    """
    # Build the option string that precedes the executable in progname.
    time_limit = f'--time_limit={time_limit}'
    use_gpu = '--use_gpu' if useGPU else ''
    reserve_gb = f'--reserve_gb={reserve_gb}' if reserve_gb else ''
    high_mem = '--high_mem' if high_mem else ''
    exclude = f'--exclude={exclude}' if exclude is not None else ''
    extra_options = ' '.join(
        [time_limit, reserve_gb, use_gpu, high_mem, exclude])

    # Convert to list of tuples b/c product object only useable once.
    combined = list(itertools.product(cellist, [str(batch)], modellist))

    queue_items = []

    engine = db.Engine()
    conn = engine.connect()

    for cell, b, model in combined:
        add_msg_str = ''
        progname = ' '.join(
            [extra_options, executable_path, script_path, cell, b, model])
        if "*" in cell:
            # Quote wildcard cellids so the shell does not glob-expand them.
            progname = ' '.join([
                extra_options, executable_path, script_path, f"'{cell}'", b,
                model
            ])
            add_msg_str = f", subbed '{cell}' for {cell} in tQueue progname."
        note = '/'.join([cell, b, model])

        # NOTE(review): SQL here is built by string interpolation; cellids or
        # modelnames containing quotes would break the query. Consider bound
        # parameters.
        sql = f"SELECT * FROM Results WHERE batch={b} and cellid='{cell}' and modelname='{model}'"
        rres = conn.execute(sql)

        # Fix: was `(rres.rowcount == 0) | force_rerun` -- bitwise OR; use the
        # logical, short-circuiting `or` intended here.
        if rres.rowcount == 0 or force_rerun:
            sql = 'SELECT * FROM tQueue WHERE allowqueuemaster=18 AND note="' + note + '"'
            r = conn.execute(sql)
            if r.rowcount > 0:
                # existing job, figure out what to do with it
                x = r.fetchone()
                queueid = x['id']
                complete = x['complete']
                if force_rerun:
                    if complete == 1:
                        # Reset a finished job so the queue master reruns it.
                        message = "Resetting existing queue entry for: %s\n" % note
                        sql = f"UPDATE tQueue SET complete=0, killnow=0, progname='{progname}', user='******', priority={priority} WHERE id={queueid}"
                        r = conn.execute(sql)

                    elif complete == 2:
                        # Revive a dead job.
                        message = "Dead queue entry for: %s exists, resetting." % note
                        sql = f"UPDATE tQueue SET complete=0, killnow=0, progname='{progname}', user='******', priority={priority} WHERE id={queueid}"
                        r = conn.execute(sql)

                    else:  # complete in [-1, 0] -- already running or queued
                        message = "Incomplete entry for: %s exists, skipping." % note

                else:
                    if complete == 1:
                        message = "Completed entry for: %s exists, skipping." % note
                    elif complete == 2:
                        message = "Dead entry for: %s exists, skipping." % note
                    else:  # complete in [-1, 0] -- already running or queued
                        message = "Incomplete entry for: %s exists, skipping." % note

            else:
                # new job
                queue_item = tQueue(
                    progname=progname,
                    machinename='exacloud',
                    queuedate=datetime.datetime.now(),
                    user=user,
                    linux_user=linux_user,
                    note=note,
                    priority=priority,
                    allowqueuemaster=18,  # exacloud specific code
                )

                queue_items.append(queue_item)
                message = f"Added exacloud job: {note}{add_msg_str}"
        else:
            message = "Model fit for: %s exists, skipping." % note

        # Fix: previously existing-job messages were logged twice (once inside
        # the branch and once here); log every message exactly once.
        log.info(message)

    with db_session() as session:
        session.add_all(queue_items)
Example #5
0
 def init_db(self):
     """Connect to the database and bind the configured table column.

     Sets ``self.engine``, ``self.column`` and ``self.session``.
     """
     self.engine = db.Engine()
     target_table = db.Tables()[self.table_name]
     self.column = getattr(target_table, self.column_name)
     session_factory = db.sessionmaker(bind=self.engine)
     self.session = session_factory()
Example #6
0
 def init_db(self):
     """Create the DB engine, resolve the working table, and open a session."""
     self.engine = db.Engine()
     self.table = db.Tables()[self.table_name]
     session_factory = db.sessionmaker(bind=self.engine)
     self.session = session_factory()
Example #7
0
# Backfill the n_parms column in the Results table for models that
# currently report 0 parameters. Assumes `d_models`, `batch`, `cellids`,
# `nd` and `load_model_xform` are defined earlier in the file.
cell_count = d_models['n'].max()

# Keep only the modelnames that were fit for every cell in the batch.
modelnames = list(d_models.loc[d_models['n'] == cell_count, 'modelname'])

d = nd.batch_comp(modelnames=modelnames,
                  batch=batch,
                  stat='r_test',
                  cellids=cellids)
cellid = cellids[0]  # d.index[0]
# n_parms appears to be per-model rather than per-cell, so one
# representative cell is queried -- TODO confirm.
dn = nd.batch_comp(modelnames=modelnames,
                   batch=batch,
                   stat='n_parms',
                   cellids=[cellid])

engine = nd.Engine()
conn = engine.connect()

for m in dn.columns[:]:
    if dn[m][0] == 0:
        # n_parms was never recorded: load the fit without evaluating the
        # model and count entries in the flattened parameter vector.
        xfspec, ctx = load_model_xform(cellid=cellid,
                                       batch=batch,
                                       modelname=m,
                                       eval_model=False)
        phi_vector = ctx['modelspec'].phi_vector
        print('nparms for {} = {}'.format(m, len(phi_vector)))

        # NOTE(review): string-formatted SQL; a modelname containing a quote
        # would break the statement. Consider bound parameters.
        sql = "UPDATE Results SET n_parms={} WHERE batch={} AND modelname='{}'".format(
            len(phi_vector), batch, m)
        conn.execute(sql)