def ping(self):
    """ Notify the queue that this task is still active.

    Refreshes ``last_contact`` on the backing row so the execution TTL
    does not expire.

    Raises:
        AlreadyFinished: if this handler has already recorded a finish.
        TaskDoesNotExist: if no live row matches this task/execution pair
            within the TTL window.
    """
    if self.finished is not None:
        raise AlreadyFinished()

    # Sample the clock once so the existence check and the update evaluate
    # the same TTL window.  Previously utcnow() was called separately for
    # each statement, so the SELECT could pass while the UPDATE quietly
    # matched zero rows at the window's edge.
    now = unix_timestamp(datetime.utcnow())

    with self.storage.cursor() as cursor:
        affected_row = apsw_helpers.get(cursor, '''
            SELECT * from %s
            WHERE
                id = :task_id
                AND execution_id = :execution_id
                AND last_contact > datetime(:now, 'unixepoch', '-%s second')
        ''' % (self._queue.table_name, self._queue.execution_ttl),
            now=now,
            task_id=self.task_id,
            execution_id=self.execution_id)

    if not affected_row:
        raise TaskDoesNotExist()

    with self.storage.transaction() as cursor:
        apsw_helpers.query(cursor, '''
            UPDATE %s
            SET
                last_contact=datetime(:now, 'unixepoch'),
                update_count=update_count + 1
            WHERE
                id = :task_id
                AND execution_id = :execution_id
                AND last_contact > datetime(:now, 'unixepoch', '-%s second')
        ''' % (self._queue.table_name, self._queue.execution_ttl),
            now=now,
            task_id=self.task_id,
            execution_id=self.execution_id)
def _save(self, finished=None, steps=None, result=None, data=None):
    """ Persist this task's state to its backing row.

    Any argument left as None falls back to the handler's current
    attribute value.  On success, the handler's attributes are updated to
    mirror what was written.

    Raises:
        TaskDoesNotExist: if the row is gone, claimed by another
            execution, or outside the execution TTL window.
    """
    # An explicit `finished` wins; otherwise reuse whatever the handler
    # already recorded.
    finished = finished if finished is not None else self.finished
    with self.storage.transaction() as cursor:
        apsw_helpers.query(cursor, '''
            UPDATE %s
            SET
                last_contact=datetime(:now, 'unixepoch'),
                update_count=update_count + 1,
                steps=:steps,
                finished=datetime(:finished, 'unixepoch'),
                result=:result,
                bytes_downloaded=:bytes_downloaded,
                download_rate=:download_rate,
                data=:data
            WHERE
                id = :task_id
                AND execution_id = :execution_id
                AND last_contact > datetime(:now, 'unixepoch', '-%s second')
        ''' % (self._queue.table_name, self._queue.execution_ttl),
            now=unix_timestamp(datetime.utcnow()),
            task_id=self.task_id,
            execution_id=self.execution_id,
            steps=json.dumps(steps if steps is not None else self.steps),
            finished=unix_timestamp(finished) if finished else None,
            result=result if result is not None else self.result,
            bytes_downloaded=self.bytes_downloaded,
            download_rate=self.download_rate,
            data=json.dumps(data if data is not None else self.data))

        # Re-select the row in the same transaction to verify the UPDATE
        # actually hit a live row (the helper does not expose an
        # affected-row count here).
        affected_row = apsw_helpers.get(cursor, '''
            SELECT * from %s
            WHERE
                id = :task_id
                AND execution_id = :execution_id
                AND last_contact > datetime(:now, 'unixepoch', '-%s second')
        ''' % (self._queue.table_name, self._queue.execution_ttl),
            now=unix_timestamp(datetime.utcnow()),
            task_id=self.task_id,
            execution_id=self.execution_id)

        if not affected_row:
            raise TaskDoesNotExist()
        else:
            # Mirror persisted values onto the handler only once the write
            # is known to have landed.
            if steps is not None:
                self.steps = steps
            if finished is not None:
                self.finished = finished
            if result is not None:
                self.result = result
            if data is not None:
                self.data = data
def save(self, job):
    """ Persist *job* to the ``jobs`` table, replacing any existing row
    with the same id.

    Args:
        job: a Job instance; its created timestamp is set to now (UTC).
    """
    assert isinstance(job, Job), 'job must be of type Job'
    with self.storage.transaction() as cursor:
        # NOTE: was `datetime.datetime.utcnow()`, which is inconsistent
        # with every other call site in this module (`datetime.utcnow()`)
        # and would raise AttributeError under a
        # `from datetime import datetime` import.
        cursor.execute('''
            REPLACE INTO jobs (id, created, spec)
            VALUES (?, DATETIME(?, 'unixepoch'), ?)
        ''', (job.id, unix_timestamp(datetime.utcnow()), job.json_spec()))
def enqueue(self, data, job_id=None, file_id=None, md5=None, bytes_total=None):
    """ Enqueue task with specified data.

    Args:
        data: JSON-serializable task payload.
        job_id, file_id, md5, bytes_total: optional row metadata.

    Returns:
        The number of rows inserted (always 1).
    """
    jsonified_data = json.dumps(data)
    with self.storage.transaction() as cursor:
        # 'unixepoch' is single-quoted: double quotes around it relied on
        # SQLite's deprecated double-quoted-string-literal fallback, which
        # fails when DQS is disabled.  Also matches the quoting used by
        # every other query in this module.
        apsw_helpers.query(cursor, '''
            INSERT INTO %s
                (created, data, job_id, file_id, md5, bytes_total)
            VALUES
                (datetime(:now, 'unixepoch'), :data, :job_id, :file_id,
                 :md5, :bytes_total)
        ''' % self.table_name,
            now=unix_timestamp(datetime.utcnow()),
            data=jsonified_data,
            job_id=job_id,
            file_id=file_id,
            md5=md5,
            bytes_total=bytes_total)
    # Return the number of rows we inserted.
    return 1
def bulk_finish(self, result='cancelled', extra_predicate=None):
    """ Mark every unfinished task (matching the optional extra
    predicate) as finished with *result*.

    Returns:
        The number of rows that matched the snapshot SELECT.
    """
    extra_predicate_sql, extra_predicate_args = (
        self._build_extra_predicate(extra_predicate))
    with self.storage.transaction() as cursor:
        now = unix_timestamp(datetime.utcnow())
        # Snapshot matching rows first; used only for the returned count.
        # Safe because both statements run in the same transaction.
        affected_rows = apsw_helpers.query(cursor, '''
            SELECT * from %s
            WHERE finished IS NULL %s
        ''' % (self.table_name, extra_predicate_sql),
            **extra_predicate_args)
        # NOTE(review): there is no execution_id / last_contact guard
        # here, so tasks whose executors are still actively pinging get
        # finished too — confirm that is intended for this code path.
        apsw_helpers.query(cursor, '''
            UPDATE %s
            SET
                execution_id = 0,
                last_contact = datetime(:now, 'unixepoch'),
                update_count = update_count + 1,
                steps = '[]',
                started = datetime(:now, 'unixepoch'),
                finished = datetime(:now, 'unixepoch'),
                result = :result
            WHERE finished IS NULL %s
        ''' % (self.table_name, extra_predicate_sql),
            now=now, result=result, **extra_predicate_args)
    return len(affected_rows)
def _refresh(self):
    """ Reload every attribute of this handler from its backing row.

    Raises:
        TaskDoesNotExist: if no live row matches this task/execution
            pair within the execution TTL window.
    """
    with self.storage.cursor() as cursor:
        record = apsw_helpers.get(cursor, '''
            SELECT * FROM %s
            WHERE
                id = :task_id
                AND execution_id = :execution_id
                AND last_contact > datetime(:now, 'unixepoch', '-%s second')
        ''' % (self._queue.table_name, self._queue.execution_ttl),
            now=unix_timestamp(datetime.utcnow()),
            task_id=self.task_id,
            execution_id=self.execution_id)

    if not record:
        raise TaskDoesNotExist()

    # Plain column-to-attribute copies.
    for attr in ('result', 'job_id', 'file_id', 'md5', 'bytes_total',
                 'bytes_downloaded', 'download_rate', 'started', 'finished'):
        setattr(self, attr, getattr(record, attr))

    # Columns that need renaming or decoding.
    self.task_id = record.id
    self.data = json.loads(record.data)
    self.steps = self._load_steps(json.loads(record.steps))
def enqueue(self, data, job_id=None, file_id=None, md5=None, bytes_total=None):
    """ Enqueue task with specified data.

    Args:
        data: JSON-serializable task payload.
        job_id, file_id, md5, bytes_total: optional row metadata.

    Returns:
        The number of rows inserted (always 1).
    """
    jsonified_data = json.dumps(data)
    with self.storage.transaction() as cursor:
        # 'unixepoch' is single-quoted: double quotes around it relied on
        # SQLite's deprecated double-quoted-string-literal fallback, which
        # fails when DQS is disabled.  Also matches the quoting used by
        # every other query in this module.
        apsw_helpers.query(cursor, '''
            INSERT INTO %s
                (created, data, job_id, file_id, md5, bytes_total)
            VALUES
                (datetime(:now, 'unixepoch'), :data, :job_id, :file_id,
                 :md5, :bytes_total)
        ''' % self.table_name,
            now=unix_timestamp(datetime.utcnow()),
            data=jsonified_data,
            job_id=job_id,
            file_id=file_id,
            md5=md5,
            bytes_total=bytes_total)
    # Return the number of rows we inserted.
    return 1
def _query_queued(self, cursor, projection, limit=None, extra_predicate=None):
    """ Select queued rows: unfinished and either never claimed or whose
    claim's last_contact has aged past the execution TTL.

    Args:
        cursor: open cursor to run the query on.
        projection: column list to select.
        limit: maximum rows to return; None means effectively unlimited.
        extra_predicate: optional extra filter, passed through
            _build_extra_predicate.

    Returns:
        Matching rows, oldest first.
    """
    predicate_sql, predicate_args = (
        self._build_extra_predicate(extra_predicate))

    effective_limit = sys.maxsize if limit is None else limit

    sql = '''
        SELECT %s FROM %s
        WHERE
            finished IS NULL
            AND (
                execution_id IS NULL
                OR last_contact <= datetime(:now, 'unixepoch', '-%s second')
            )
            %s
        ORDER BY created ASC
        LIMIT :limit
    ''' % (projection, self.table_name, self.execution_ttl, predicate_sql)

    return apsw_helpers.query(
        cursor, sql,
        now=unix_timestamp(datetime.utcnow()),
        limit=effective_limit,
        **predicate_args)
def requeue(self):
    """ Return this task to the queue so another executor can claim it.

    Clears the claim, progress, and result columns on the backing row and
    resets the handler's download counters.

    Raises:
        StepRunning: if any step is still executing.
        AlreadyFinished: if the task already finished.
        TaskDoesNotExist: if the row is gone, claimed by another
            execution, or outside the execution TTL window.
    """
    if self._running_steps() != 0:
        raise StepRunning()
    if self.finished is not None:
        raise AlreadyFinished()

    with self._queue.storage.transaction() as cursor:
        affected_row = apsw_helpers.get(cursor, '''
            SELECT * from %s
            WHERE
                id = :task_id
                AND execution_id = :execution_id
                AND last_contact > datetime(:now, 'unixepoch', '-%s second')
        ''' % (self._queue.table_name, self._queue.execution_ttl),
            now=unix_timestamp(datetime.utcnow()),
            task_id=self.task_id,
            execution_id=self.execution_id)

        if not affected_row:
            raise TaskDoesNotExist()

        # Only mutate local state once we know the row is still ours.
        # Previously these were cleared before the existence check, so a
        # TaskDoesNotExist left the handler with corrupted counters.
        self.bytes_downloaded = None
        self.download_rate = None
        data = copy.deepcopy(self.data)
        data.pop('time_left', None)

        apsw_helpers.query(cursor, '''
            UPDATE %s
            SET
                last_contact=NULL,
                update_count=update_count + 1,
                started=NULL,
                steps=NULL,
                execution_id=NULL,
                finished=NULL,
                data=:data,
                result=NULL
            WHERE
                id = :task_id
                AND execution_id = :execution_id
                AND last_contact > datetime(:now, 'unixepoch', '-%s second')
        ''' % (self._queue.table_name, self._queue.execution_ttl),
            data=json.dumps(data),
            now=unix_timestamp(datetime.utcnow()),
            task_id=self.task_id,
            execution_id=self.execution_id)
def _dequeue_task(self, extra_predicate=None):
    """ Claim one queued task under a fresh execution id.

    Returns:
        A TaskHandlerClass instance for the claimed task, or None when
        nothing is queued.
    """
    # Fresh claim token; uuid1 embeds host/time for traceability.
    execution_id = uuid.uuid1().hex
    extra_predicate_sql, extra_predicate_args = (
        self._build_extra_predicate(extra_predicate))
    task_id = None
    with self.storage.transaction() as cursor:
        while task_id is None:
            # Fetch a small batch of candidates, oldest first.
            possible_tasks = self._query_queued(
                cursor, 'id, created, data', limit=5,
                extra_predicate=extra_predicate)
            if not possible_tasks:
                # nothing to dequeue
                return None
            for possible_task in possible_tasks:
                # attempt to claim the task
                now = unix_timestamp(datetime.utcnow())
                apsw_helpers.query(cursor, '''
                    UPDATE %s
                    SET
                        execution_id = :execution_id,
                        last_contact = datetime(:now, 'unixepoch'),
                        update_count = update_count + 1,
                        started = datetime(:now, 'unixepoch'),
                        steps = '[]'
                    WHERE
                        id = :task_id
                        AND finished IS NULL
                        AND (
                            execution_id IS NULL
                            OR last_contact <=
                                datetime(:now, 'unixepoch', '-%s second')
                        )
                        %s
                ''' % (self.table_name, self.execution_ttl,
                       extra_predicate_sql),
                    now=now,
                    execution_id=execution_id,
                    task_id=possible_task.id,
                    **extra_predicate_args)
                # NOTE(review): the claim is assumed to succeed — the
                # UPDATE's affected-row count is never checked and task_id
                # is set unconditionally, so the while loop body runs at
                # most once.  Presumably the surrounding transaction makes
                # a lost race impossible; confirm against the storage
                # layer's isolation guarantees.
                task_id = possible_task.id
                break
    return self.TaskHandlerClass(execution_id=execution_id,
                                 task_id=task_id, queue=self)
def valid(self):
    """ Check to see if we are still active.

    Returns:
        True when the task's last_contact is within the execution TTL
        window for this execution id; False otherwise (including when
        the task already finished or the row cannot be found).
    """
    # A finished handler can never be active again.
    if self.finished is not None:
        return False

    sql = '''
        SELECT
            (last_contact > datetime(:now, 'unixepoch', '-%s second'))
                AS valid
        FROM %s
        WHERE
            id = :task_id
            AND execution_id = :execution_id
    ''' % (self._queue.execution_ttl, self._queue.table_name)

    with self.storage.cursor() as cursor:
        match = apsw_helpers.get(
            cursor, sql,
            now=unix_timestamp(datetime.utcnow()),
            task_id=self.task_id,
            execution_id=self.execution_id)

    if match is None:
        return False
    return bool(match.valid)
def bulk_finish(self, result='cancelled', extra_predicate=None):
    """ Finish every queued task: unfinished rows that are either
    unclaimed or whose claim has aged past the execution TTL.

    Actively-executing tasks (recent last_contact) are left alone.

    Returns:
        The number of rows that matched the snapshot SELECT.
    """
    extra_predicate_sql, extra_predicate_args = (
        self._build_extra_predicate(extra_predicate))
    with self.storage.transaction() as cursor:
        now = unix_timestamp(datetime.utcnow())
        # Snapshot matching rows for the returned count.  The SELECT and
        # the UPDATE below share the same predicate and :now value inside
        # one transaction, so the count matches what gets updated.
        affected_rows = apsw_helpers.query(cursor, '''
            SELECT * from %s
            WHERE
                finished IS NULL
                AND (
                    execution_id IS NULL
                    OR last_contact <=
                        datetime(:now, 'unixepoch', '-%s second')
                )
                %s
        ''' % (self.table_name, self.execution_ttl, extra_predicate_sql),
            now=now, **extra_predicate_args)
        apsw_helpers.query(cursor, '''
            UPDATE %s
            SET
                execution_id = 0,
                last_contact = datetime(:now, 'unixepoch'),
                update_count = update_count + 1,
                steps = '[]',
                started = datetime(:now, 'unixepoch'),
                finished = datetime(:now, 'unixepoch'),
                result = :result
            WHERE
                finished IS NULL
                AND (
                    execution_id IS NULL
                    OR last_contact <=
                        datetime(:now, 'unixepoch', '-%s second')
                )
                %s
        ''' % (self.table_name, self.execution_ttl, extra_predicate_sql),
            now=now, result=result, **extra_predicate_args)
    return len(affected_rows)
def _dequeue_task(self, extra_predicate=None):
    """ Claim one queued task under a fresh execution id.

    Returns:
        A TaskHandlerClass instance for the claimed task, or None when
        nothing is queued.
    """
    # Fresh claim token; uuid1 embeds host/time for traceability.
    execution_id = uuid.uuid1().hex
    extra_predicate_sql, extra_predicate_args = (
        self._build_extra_predicate(extra_predicate))
    task_id = None
    with self.storage.transaction() as cursor:
        while task_id is None:
            # Fetch a small batch of candidates, oldest first.
            possible_tasks = self._query_queued(cursor, 'id, created, data',
                                                limit=5,
                                                extra_predicate=extra_predicate)
            if not possible_tasks:
                # nothing to dequeue
                return None
            for possible_task in possible_tasks:
                # attempt to claim the task
                now = unix_timestamp(datetime.utcnow())
                apsw_helpers.query(cursor, '''
                    UPDATE %s
                    SET
                        execution_id = :execution_id,
                        last_contact = datetime(:now, 'unixepoch'),
                        update_count = update_count + 1,
                        started = datetime(:now, 'unixepoch'),
                        steps = '[]'
                    WHERE
                        id = :task_id
                        AND finished IS NULL
                        AND (
                            execution_id IS NULL
                            OR last_contact <=
                                datetime(:now, 'unixepoch', '-%s second')
                        )
                        %s
                ''' % (self.table_name, self.execution_ttl,
                       extra_predicate_sql),
                    now=now,
                    execution_id=execution_id,
                    task_id=possible_task.id,
                    **extra_predicate_args)
                # NOTE(review): the UPDATE's affected-row count is never
                # checked and task_id is set unconditionally, so the claim
                # is assumed to succeed and the while loop body runs at
                # most once.  Presumably the surrounding transaction makes
                # a lost race impossible; confirm against the storage
                # layer's isolation guarantees.
                task_id = possible_task.id
                break
    return self.TaskHandlerClass(execution_id=execution_id,
                                 task_id=task_id, queue=self)
def _query_queued(self, cursor, projection, limit=None, extra_predicate=None):
    """ Select queued rows: unfinished and either never claimed or whose
    claim's last_contact has aged past the execution TTL.

    Args:
        cursor: open cursor to run the query on.
        projection: column list to select.
        limit: maximum rows to return; None means effectively unlimited.
        extra_predicate: optional extra filter, passed through
            _build_extra_predicate.

    Returns:
        Matching rows, oldest first.
    """
    predicate_sql, predicate_args = (
        self._build_extra_predicate(extra_predicate))

    effective_limit = sys.maxsize if limit is None else limit

    sql = '''
        SELECT %s FROM %s
        WHERE
            finished IS NULL
            AND (
                execution_id IS NULL
                OR last_contact <= datetime(:now, 'unixepoch', '-%s second')
            )
            %s
        ORDER BY created ASC
        LIMIT :limit
    ''' % (projection, self.table_name, self.execution_ttl, predicate_sql)

    return apsw_helpers.query(
        cursor, sql,
        now=unix_timestamp(datetime.utcnow()),
        limit=effective_limit,
        **predicate_args)
def requeue(self):
    """ Return this task to the queue so another executor can claim it.

    Clears the claim, progress, and result columns on the backing row.

    Raises:
        StepRunning: if any step is still executing.
        AlreadyFinished: if the task already finished.
        TaskDoesNotExist: if the row is gone, claimed by another
            execution, or outside the execution TTL window.
    """
    if self._running_steps() != 0:
        raise StepRunning()
    if self.finished is not None:
        raise AlreadyFinished()

    # Run the existence check and the reset inside one transaction, and
    # constrain the UPDATE on execution_id too.  Previously they ran in
    # separate transactions and the UPDATE matched on id alone, so a row
    # re-claimed by another executor between the two statements could be
    # clobbered.
    with self.storage.transaction() as cursor:
        affected_row = apsw_helpers.get(cursor, '''
            SELECT * from %s
            WHERE
                id = :task_id
                AND execution_id = :execution_id
                AND last_contact > datetime(:now, 'unixepoch', '-%s second')
        ''' % (self._queue.table_name, self._queue.execution_ttl),
            now=unix_timestamp(datetime.utcnow()),
            task_id=self.task_id,
            execution_id=self.execution_id)

        if affected_row is None:
            raise TaskDoesNotExist()

        apsw_helpers.query(cursor, '''
            UPDATE %s
            SET
                last_contact=NULL,
                update_count=update_count + 1,
                started=NULL,
                steps=NULL,
                execution_id=NULL,
                finished=NULL,
                result=NULL
            WHERE
                id = :task_id
                AND execution_id = :execution_id
        ''' % self._queue.table_name,
            task_id=self.task_id,
            execution_id=self.execution_id)
def projection_params():
    """ Build the bind-parameter dict shared by queue projections.

    Returns:
        A dict with a single key, ``now``: the current UTC time as a
        unix timestamp.
    """
    current_time = datetime.utcnow()
    return dict(now=unix_timestamp(current_time))