Example #1
def test_dependency():

    # Job 2 depends on job 1,
    # so job 1 must run first.

    global ran1
    ran1 = False

    global ran2
    ran2 = False

    class Job1(Job):
        def run(self):
            global ran1
            ran1 = True

    class Job2(Job):
        def run(self):
            ok_(ran1)

            global ran2
            ran2 = True

    j1 = Job1('job1')
    j2 = Job2('job2', [j1])

    q = Queue(1)

    q.put(j2)
    q.put(j1)

    q.run()

    ok_(ran2)
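
The test only exercises the public surface of the databanks job machinery: a job is constructed with a name and an optional list of prerequisite jobs, the put() order does not matter, and run() executes everything in dependency order (ok_ is the nose.tools assertion helper). The sketch below is not the real cmbi/databanks implementation; it is a minimal, single-threaded stand-in in which the nthreads argument is ignored and the depends_on/done attribute names are assumptions, but it would satisfy the tests in this listing.

class Job:
    # Minimal stand-in: a job knows its name, its prerequisites and
    # whether it has already run.
    def __init__(self, name, depends_on=None):
        self.name = name
        self.depends_on = depends_on or []
        self.done = False

    def run(self):
        raise NotImplementedError


class Queue:
    # Deliberately simplified: jobs run one after another on the calling
    # thread; 'nthreads' is accepted only for interface compatibility.
    def __init__(self, nthreads):
        self.nthreads = nthreads
        self._jobs = []

    def put(self, job):
        self._jobs.append(job)

    def run(self):
        # Keep making passes until every queued job has run, including
        # jobs that other jobs put on the queue while running.  A job is
        # only started once all of its prerequisites are done.
        while self._jobs:
            progressed = False
            for job in list(self._jobs):
                if all(dep.done for dep in job.depends_on):
                    job.run()
                    job.done = True
                    self._jobs.remove(job)
                    progressed = True
            if not progressed:
                raise RuntimeError("unsatisfiable job dependencies")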
Example #2
def test_queue():
    # Four jobs on a two-thread queue: all of them must be run.
    q = Queue(2)

    jobs = []
    for i in range(4):
        job = TestJob()
        q.put(job)
        jobs.append(job)
    q.run()

    for job in jobs:
        ok_(job.test_done)
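
TestJob itself is not part of the excerpt; a hypothetical definition along these lines, which merely records in a test_done flag that run() was called, would make the test pass (building on the Job stand-in sketched under Example #1).

class TestJob(Job):
    # Hypothetical test helper, not from the project: it only records
    # that its run() method was called.
    def __init__(self):
        Job.__init__(self, 'test')
        self.test_done = False

    def run(self):
        self.test_done = True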
Example #3
File: update.py  Project: cmbi/databanks
root.addHandler(sh)
root.addHandler(handler)
sh.setLevel(logging.DEBUG)
handler.setLevel(logging.INFO)
root.setLevel(logging.DEBUG)

_log = logging.getLogger(__name__)

if __name__ == "__main__":

    lock_path = settings['LOCKFILE']
    _log.debug("waiting for lock on %s" % lock_path)
    with FileLock(lock_path) as lock:
        _log.debug("lock acquired")

        queue = Queue(settings["NTHREADS"])

        # 1. Download external data.
        # 2. Schedule jobs that work with downloaded data.
        # 3. run whynot & mrs when all data files have been
        #    generated/downloaded.
        pdb_job = FetchPdbJob()
        uniprot_job = FetchUniprotJob()
        mmcif_job = FetchMmcifJob()
        sf_job = FetchStructureFactorsJob()
        pdbredo_job = FetchPdbredoJob()
        nmr_job = FetchNmrJob()
        queue.put(uniprot_job)
        queue.put(pdb_job)
        queue.put(mmcif_job)
        queue.put(nmr_job)
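
        # The excerpt stops after the fetch jobs have been queued, so
        # steps 2 and 3 of the plan are not shown.  Judging from Example
        # #6, where ScheduleMmcifDataJob is constructed as
        # ScheduleMmcifDataJob(queue, mmcif_job, pdb_job, uniprot_job),
        # the script presumably continues roughly as below; the exact set
        # of schedule jobs, their arguments and the final queue.run()
        # call are assumptions, not part of the excerpt.
        mmcif_data_job = ScheduleMmcifDataJob(queue, mmcif_job, pdb_job,
                                              uniprot_job)
        queue.put(mmcif_data_job)

        queue.run()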
Example #4
            os.path.join(hssp3_dir, filename)
            for filename in os.listdir(hssp3_dir)
        ]

        hghssp_dir = os.path.join(settings['DATADIR'], 'hg-hssp')
        hghssp_paths = [
            os.path.join(hghssp_dir, filename)
            for filename in os.listdir(hghssp_dir)
        ]

        # Oldest files go first:
        for path in sorted(hssp3_paths + hghssp_paths, key=os.path.getmtime):

            # Only re-queue entries that maxed out at 9999 hits:
            try:
                if count_hits(path) >= 9999:
                    if path.startswith(hssp3_dir):
                        pdbid = os.path.basename(path).split('.')[0]
                        self._queue.put(HsspJob(pdbid))
                    elif path.startswith(hghssp_dir):
                        seq_id = os.path.basename(path).split('.')[0]
                        self._queue.put(HgHsspJob(seq_id))
            # Skip files whose hit count cannot be determined.
            except Exception:
                continue


if __name__ == "__main__":
    q = Queue(12)
    q.put(HsspScheduleJob(q))
    q.run()
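
The detail worth noting here is that the schedule job is handed the very queue it runs on (q.put(HsspScheduleJob(q))) and enqueues further HsspJob/HgHsspJob instances from inside its own run() method, so the work list is only built at run time. Stripped of the HSSP specifics, the pattern looks roughly like this; ScheduleJob, discover_work_items and WorkItemJob are illustrative names, not from the project.

class ScheduleJob(Job):
    # Illustrative pattern only: a job that schedules more work onto the
    # queue it was given.
    def __init__(self, queue):
        Job.__init__(self, 'schedule')
        self._queue = queue

    def run(self):
        # discover_work_items() and WorkItemJob stand in for whatever
        # decides which entries need (re)building.
        for item in discover_work_items():
            self._queue.put(WorkItemJob(item))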
Example #5
def test_empty_queue():
    # Running a queue with no jobs should simply return.
    q = Queue(2)
    q.run()
Example #6
def test_mmcif_data_dependency():

    # The mmCIF schedule job may only run once the uniprot, mmcif and
    # pdb fetch jobs have all finished.

    global uniprot_job
    uniprot_job = FetchUniprotJob()

    global mmcif_job
    mmcif_job = FetchMmcifJob()

    global pdb_job
    pdb_job = FetchPdbJob()

    class TestScheduleMmcifDataJob(ScheduleMmcifDataJob):
        def run(self):
            global uniprot_job
            ok_(uniprot_job.done)

            global mmcif_job
            ok_(mmcif_job.done)

            global pdb_job
            ok_(pdb_job.done)

            ScheduleMmcifDataJob.run(self)

    queue = Queue(32)

    data_job = TestScheduleMmcifDataJob(queue, mmcif_job, pdb_job, uniprot_job)

    queue.put(data_job)
    queue.put(uniprot_job)
    queue.put(mmcif_job)
    queue.put(pdb_job)

    queue.run()

    ok_(data_job.done)