Code example #1
File: dp_rlearn.py Project: wuyou33/surf12adam (identical code is also listed from AndreaCensi/surf12adam)
def learning_times_rlearn(outdir, learners, streams, nthreads, nrefines):
    # Collect cpu/wall times from the Compmake job cache for the 'learn' and
    # 'summarize' jobs of the first learner/stream pair, then pickle them.
    job_ids = CompmakeGlobalState.jobs_defined_in_this_session
    id_learner, id_stream = learners[0], streams[0]

    # NaN marks (thread, refinement) cells whose job was not found in the session.
    cputime_index_level_learn = np.ones((nthreads, nrefines)) * np.nan
    walltime_index_level_learn = np.ones((nthreads, nrefines)) * np.nan
    for i in range(nthreads):
        for ref in range(nrefines):
            search_id = 'learn-%s-%s-%sof%s-refined%s' % (
                id_stream, id_learner, i + 1, nthreads, ref)
            for job_id in job_ids:
                if job_id == search_id:
                    job_cache = get_job_cache(job_id)
                    cputime_index_level_learn[i, ref] = job_cache.cputime_used
                    walltime_index_level_learn[i, ref] = job_cache.walltime_used

    print(cputime_index_level_learn)
    print(walltime_index_level_learn)

    cputime_index_level_summarize = np.ones((nthreads, nrefines)) * np.nan
    walltime_index_level_summarize = np.ones((nthreads, nrefines)) * np.nan
    for i in range(nthreads):
        for ref in range(nrefines):
            search_id = 'learn-%s-%s-%s-refined%s-summarize' % (
                id_stream, id_learner, i, ref
            )  # bug in computation naming, should be i+1
            print(search_id)
            for job_id in job_ids:
                if job_id == search_id:
                    job_cache = get_job_cache(job_id)
                    cputime_index_level_summarize[i, ref] = job_cache.cputime_used
                    walltime_index_level_summarize[i, ref] = job_cache.walltime_used

    print(cputime_index_level_summarize)
    print(walltime_index_level_summarize)

    times = {
        'cputime_index_level_learn': cputime_index_level_learn,
        'walltime_index_level_learn': walltime_index_level_learn,
        'cputime_index_level_summarize': cputime_index_level_summarize,
        'walltime_index_level_summarize': walltime_index_level_summarize
    }

    #    pdb.set_trace()
    with open(os.path.join(outdir, 'times.pickle'), 'wb') as f:
        pickle.dump(times, f)
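
The matrices above can be read back from times.pickle for analysis. A minimal sketch (not from the original project; the helper name summarize_times is invented) that loads the dict and averages over the cells that were actually filled:

import os
import pickle

import numpy as np

def summarize_times(outdir):
    # Load the dict written by learning_times_rlearn above.
    with open(os.path.join(outdir, 'times.pickle'), 'rb') as f:
        times = pickle.load(f)
    for name, matrix in times.items():
        filled = np.count_nonzero(~np.isnan(matrix))
        # nanmean skips the NaN cells left for jobs that were never found.
        print('%s: %d cells filled, mean %.2f s' % (name, filled, np.nanmean(matrix)))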
Code example #2
def my_get_job_cache(job_id):
    """ Gets the job cache, making sure it was done """
    cache = get_job_cache(job_id)
    if cache.state != Cache.DONE:
        msg = 'The job %s was supposed to be finished: %s' % (job_id, cache) 
        raise Exception(msg)
    return cache
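
A hypothetical call site (the job id is invented for illustration); the returned Cache exposes the cputime_used and walltime_used fields read in example #1:

cache = my_get_job_cache('learn-stream0-learner0-1of2-refined0')  # made-up job id
print(cache.cputime_used, cache.walltime_used)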
Code example #3
def my_get_job_cache(context, the_job):
    """ Gets the job cache, making sure it was done """
    db = context.get_compmake_db()
    cache = get_job_cache(the_job, db=db)
    if cache.state != Cache.DONE:
        msg = 'The job %s was supposed to be finished: %s' % (the_job, cache) 
        raise Exception(msg)
    return cache
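
Example #3 differs from example #2 only in resolving the Compmake db explicitly from the passed context rather than relying on whatever global storage is active; this context-based form is the one example #4 below schedules via context.comp_dynamic.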
Code example #4
def count_resources(context, the_job):
    # Schedule a cache-reading job for `the_job` and every job in its
    # dependency tree, then aggregate the results in a final job.
    db = context.get_compmake_db()
    cache = get_job_cache(the_job, db=db)
    if cache.state != Cache.DONE:
        msg = 'The job %s was supposed to be finished: %s' % (the_job, cache) 
        raise Exception(msg)
    
    cq = CacheQueryDB(db)
    children = cq.tree_children_and_uodeps(the_job)
    check_isinstance(children, set)
    children.add(the_job)
    
    res = {}
    for j in children:
        res[j] = context.comp_dynamic(my_get_job_cache, j, extra_dep=[Promise(j)],
                                      job_id='count-%s-%s' % (the_job, j))
        
    return context.comp(finalize_result, res)
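
finalize_result is not shown in this listing. A plausible sketch (the body is assumed, not taken from the original project): by the time it runs, Compmake has replaced each promised value in res with the actual Cache object, so the per-job resources can simply be summed.

def finalize_result(res):
    # res maps job_id -> Cache; the promises were resolved before the call.
    total_cpu = sum(c.cputime_used for c in res.values())
    total_wall = sum(c.walltime_used for c in res.values())
    return {'njobs': len(res), 'cputime_used': total_cpu, 'walltime_used': total_wall}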
Code example #5
File: meat.py Project: AndreaCensi/pysnip
def pysnip_make(dirname, compmake_command):
    # Create one Compmake job per .py snippet in `dirname`, syncing the
    # Compmake cache with each snippet's recorded status before running.
    files = glob(os.path.join(dirname, '*.py'))
    prefixes = [os.path.splitext(os.path.basename(f))[0] for f in files]
    logger.info('Found %d snippets in directory %s' % (len(prefixes), dirname))
    
    use_filesystem(os.path.join(dirname, '.compmake'))
    ntodo = 0
    for p in prefixes:
        job = Job(dirname, p)
        job_id = job.basename
        current_state = None
        if job_exists(job_id):
            current_state = get_job_cache(job_id).state
            
        if job.status == DONE_UPTODATE:
#            logger.info('%s: done' % job.basename)
            if current_state != Cache.DONE:
                mark_as_done(job_id)
        elif job.status == FAILED:
#            logger.info('%s: failed' % job.basename)
            if current_state != Cache.FAILED:
                mark_as_failed(job_id)
        elif job.status == DONE_NEEDSUPDATE:
            mark_as_notstarted(job_id)
#            logger.info('%s: done (but needs update)' % job.basename)
        elif job.status == NOTSTARTED:
            mark_as_notstarted(job_id)
#            logger.info('%s: not started' % job.basename)
        comp(run_job, job, job_id=job_id) 
        if job.status != DONE_UPTODATE:
            ntodo += 1
            
#    logger.info('%d/%d jobs to do' % (ntodo, len(prefixes)))
    batch_command('stats')
    if compmake_command is not None:
        return batch_command(compmake_command)
    else:
        compmake_console()
        return 0
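
A minimal driver sketch (the argument handling is assumed, not part of the pysnip source shown here):

if __name__ == '__main__':
    import sys
    # Usage: python meat.py <snippet-dir> [compmake-command]
    # With a command such as 'make' it runs in batch mode; without one it
    # opens the interactive Compmake console.
    snippet_dir = sys.argv[1]
    command = sys.argv[2] if len(sys.argv) > 2 else None
    sys.exit(pysnip_make(snippet_dir, command))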