def __init__(self, session, **kwargs):
    """Set up the job runner around an existing SQLAlchemy session.

    Defaults (timeout, process_limit, etc.) may be overridden by any
    keyword argument, which is written straight into the instance dict.
    Ensures the job table exists before returning.
    """
    self.session = session
    self.db = session.bind          # engine behind the session
    self.processes = {}             # bookkeeping for spawned workers
    self.queue = Queue()
    self.timeout = 1
    self.process_limit = 5
    self.parallelize = True
    # Overrides last so callers can replace any default above.
    for attr, value in kwargs.items():
        self.__dict__[attr] = value
    check_job_table(self.db)
def add_job(session, jobname, args, kwargs=None):
    """Enqueue a job in the __dbtruck_jobs__ table.

    Pickles *args*/*kwargs* and inserts a row for *jobname*, refusing to
    enqueue if an identical job is already pending or running.

    Parameters:
        session: SQLAlchemy session; its bind's DBAPI connection must be
            at isolation level 3 (serializable in classic psycopg2
            numbering -- TODO confirm the intended driver constant).
        jobname: name of the job function to run.
        args: positional arguments for the job (any picklable object).
        kwargs: keyword arguments for the job; defaults to {}.

    Raises:
        RuntimeError: if the connection's isolation level is wrong, or a
            duplicate unfinished job already exists (session is rolled
            back first in that case).
    """
    # Fixed: original used a mutable default argument (kwargs={}).
    if kwargs is None:
        kwargs = {}
    isolevel = session.bind.raw_connection().connection.isolation_level
    if isolevel != 3:
        raise RuntimeError("db connection has improper isolation level: %d" % isolevel)
    check_job_table(session.bind)
    args = pickle.dumps(args)
    kwargs = pickle.dumps(kwargs)
    qargs = {"a1": jobname, "a2": args, "a3": kwargs}
    # NOTE(review): check-then-insert is only race-free because of the
    # serializable isolation level enforced above.
    q = """select count(*) from __dbtruck_jobs__ where fname = :a1 and args = :a2 and kwargs = :a3 and (running = true or done = false)"""
    count = session.execute(q, qargs).fetchone()[0]
    if count:
        session.rollback()
        raise RuntimeError("duplicate job exists")
    q = """insert into __dbtruck_jobs__(fname, args, kwargs) values(:a1, :a2, :a3)"""
    session.execute(q, qargs)
    session.commit()
def wait(session, secs=10):
    """Enqueue a job that sleeps for *secs* seconds."""
    add_job(session, "wait", [secs], {})


def crash(session, *args):
    """Enqueue a job that deliberately crashes (for testing recovery)."""
    add_job(session, "crash", args, {})


def test_insert(session, v):
    """Enqueue a test job that inserts the value *v*."""
    add_job(session, "test_insert", [v], {})


def test_inc(session, v):
    """Enqueue a test job that increments the value *v*."""
    add_job(session, "test_inc", [v], {})


if __name__ == "__main__":
    import locjoin.settings as settings
    from locjoin import init_model
    import locjoin.meta as meta

    # Serializable-ish isolation is required by add_job's duplicate check.
    db = create_engine(settings.DBURI, isolation_level="REPEATABLE READ")
    init_model(db)
    check_job_table(db)
    run_extractor(meta.session, "realestate_small")