def enqueue(self, method, args):
    with log_yield(measure="queue.enqueue"):
        args = json.dumps(args)
        with self.conn_adapter.connection.cursor(cursor_factory=LoggingCursor) as curs:
            curs.execute(
                'INSERT INTO "queue_classic_jobs" (q_name, method, args) '
                "VALUES (%s, %s, %s) RETURNING id",
                [self.name, method, args],
            )
            return curs.fetchone()[0]
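# Illustrative usage sketch (not part of this module): `q` is assumed to be an
# instance of this queue class bound to a queue name, and the method/args pair
# is an arbitrary example payload. enqueue() serializes args to JSON and
# returns the id of the inserted row.
#
#   job_id = q.enqueue("emails.send_welcome", {"user_id": 42})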
def lock(self, top_bound=None):
    with log_yield(measure="queue.lock"):
        if top_bound is None:
            top_bound = self.top_bound
        with self.conn_adapter.connection.cursor(cursor_factory=LoggingRealDictCursor) as curs:
            curs.execute("SELECT * FROM lock_head(%s, %s)", [self.name, top_bound])
            if not curs.rowcount:
                return None
            job = curs.fetchone()
            # NOTE: JSON in args is parsed automatically
            # timestamptz columns are converted automatically to datetime
            if job["created_at"]:
                now = datetime.datetime.now(job["created_at"].tzinfo)
                ttl = now - job["created_at"]
                # total_seconds() covers the full delta; timedelta.microseconds
                # alone would drop whole seconds from the measurement
                _logger.info(
                    "measure#qc.time-to-lock=%sms source=%s"
                    % (int(ttl.total_seconds() * 1000), self.name)
                )
            return job
def count(self):
    with log_yield(measure="queue.count"):
        with self.conn_adapter.connection.cursor(cursor_factory=psycopg2.extensions.cursor) as curs:
            curs.execute(
                'SELECT COUNT(*) FROM "queue_classic_jobs" WHERE q_name = %s',
                [self.name],
            )
            return curs.fetchone()[0]

def delete_all(self):
    with log_yield(measure="queue.delete_all"):
        return self.conn_adapter.execute(
            'DELETE FROM "queue_classic_jobs" WHERE q_name = %s', [self.name]
        )

def delete(self, id):
    with log_yield(measure="queue.delete"):
        return self.conn_adapter.execute(
            'DELETE FROM "queue_classic_jobs" WHERE id = %s', [id]
        )

def unlock(self, id):
    with log_yield(measure="queue.unlock"):
        return self.conn_adapter.execute(
            'UPDATE "queue_classic_jobs" SET locked_at = NULL WHERE id = %s', [id]
        )
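# Minimal worker-loop sketch built on lock/delete/unlock (illustrative only:
# `q`, `resolve`, and the 0.5s poll interval are assumptions, not part of this
# module). A successful run deletes the job; a failure unlocks it so another
# worker can pick it up again.
#
#   while True:
#       job = q.lock()
#       if job is None:
#           time.sleep(0.5)
#           continue
#       try:
#           resolve(job["method"])(job["args"])
#           q.delete(job["id"])
#       except Exception:
#           q.unlock(job["id"])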