Code example #1
0
File: rq_worker.py — Project: zxytim/jrp
    def run(self, args):
        """Continuously triage jobs in the RQ failed queue.

        Scans the failed queue in a loop.  For each failed job whose queue
        of origin is listed in ``args.queue_names``: if its traceback
        matches a known transient error it is requeued, if it matches a
        known benign error it is deleted, otherwise it is left in place.
        Sleeps ``args.interval`` seconds between scans.  Never returns.

        :param args: parsed CLI arguments providing ``queue_names``
            (iterable of queue names to manage) and ``interval``
            (seconds to sleep between scans).
        """
        redis = StrictRedis(config.REDIS_HOST, config.REDIS_PORT)

        names = args.queue_names
        fq = FailedQueue(connection=redis)

        # Traceback substrings identifying transient failures worth a retry.
        # Built once here instead of on every check_fjob call.
        # XXX: Damn... Should change to another database soon
        temporary_errors = (
            'sqlite3.OperationalError: database is locked',
            'rq.timeouts.JobTimeoutException: Task exceeded maximum timeout value',
            'elasticsearch.exceptions.ConnectionTimeout: ConnectionTimeout caused by',
            'elasticsearch.exceptions.ConnectionError: ConnectionError',
            'elasticsearch.exceptions.TransportError: TransportError',
            'requests.exceptions.ConnectionError: HTTPSConnectionPool',
            'pdftotext',
            'not pdf, but `text/xml`',
            'OperationalError: database is locked',
            """oss2.exceptions.RequestError: {'status': -2, 'x-oss-request-id': '', 'details': "RequestError: ('Connection aborted.', timeout('timed out'))"}""",
            "requests.exceptions.ConnectionError: ('Connection aborted.', timeout('timed out'))",
            """"RequestError: ('Connection aborted.', BrokenPipeError(32, 'Broken pipe'))"}""",
            """oss2.exceptions.RequestError: {'status': -2, 'x-oss-request-id': '', 'details': "RequestError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))"}""",
            'port=80): Read timed out. (read timeout=60)"}',
        )

        # Traceback substrings identifying expected, harmless failures
        # whose jobs can simply be dropped.
        permitted_errors = (
            'sqlite3.IntegrityError: UNIQUE constraint failed: progress.key',
        )

        def check_fjob(fjob):
            """Classify a failed job: 'requeue', 'delete', or None (keep)."""
            if fjob.origin not in names:
                return None
            # exc_info may be non-string (e.g. None); only strings are matched.
            if not isinstance(fjob.exc_info, str):
                return None
            if any(s in fjob.exc_info for s in temporary_errors):
                return 'requeue'
            if any(s in fjob.exc_info for s in permitted_errors):
                return 'delete'
            return None

        while True:
            count_requeue = 0
            count_delete = 0

            fjobs = fq.get_jobs()
            for fjob in fjobs:
                verdict = check_fjob(fjob)
                if verdict == 'requeue':
                    fq.requeue(fjob.id)
                    count_requeue += 1
                elif verdict == 'delete':
                    fjob.delete()
                    count_delete += 1

            num_remain = len(fjobs) - count_requeue - count_delete
            print(
                '{} failed jobs: {} requeued, {} deleted, {} remain'.format(
                    len(fjobs), count_requeue, count_delete, num_remain))
            time.sleep(args.interval)
Code example #2
0
File: rqfilter.py — Project: ucldc/harvester
import os

from redis import Redis
from rq import Connection, Queue, Worker
from rq.queue import FailedQueue

# Triage script: requeue every job in the RQ failed queue whose traceback
# contains ``err_search``, after bumping its timeout to one week.

REDIS_PASSWORD = os.environ.get('REDIS_PASSWORD', None)
REDIS_HOST = os.environ.get('REDIS_HOST', 'localhost')
redis_conn = Redis(host=REDIS_HOST, password=REDIS_PASSWORD)
qfailed = FailedQueue(connection=redis_conn)

# action: one of 'requeue' or 'cancel'
err_search = 'timeout'
action = 'requeue'
#action = 'cancel'

jobs_filtered = []
for job in qfailed.jobs:
    dump = job.dump()  # fetch once; reused for matching below
    print(dump)
    if err_search in dump['exc_info']:
        jobs_filtered.append(job)
        job.timeout = 604800  # one week, in seconds
        job.save()
        if action == 'requeue':
            result = qfailed.requeue(job.id)
            print(result)
print('{} jobs matched {}'.format(len(jobs_filtered), err_search))
Code example #3
0
File: rqfilter.py — Project: mredar/harvester
import os

from redis import Redis
from rq import Connection, Queue, Worker
from rq.queue import FailedQueue

# Triage script: requeue every job in the RQ failed queue whose traceback
# contains ``err_search``, after bumping its timeout to one week.

REDIS_PASSWORD = os.environ.get('REDIS_PASSWORD', None)
REDIS_HOST = os.environ.get('REDIS_HOST', 'localhost')
redis_conn = Redis(host=REDIS_HOST, password=REDIS_PASSWORD)
qfailed = FailedQueue(connection=redis_conn)

# action: one of 'requeue' or 'cancel'
err_search = 'timeout'
action = 'requeue'
#action = 'cancel'

jobs_filtered = []
for job in qfailed.jobs:
    dump = job.dump()  # fetch once; reused for matching below
    print(dump)
    if err_search in dump['exc_info']:
        jobs_filtered.append(job)
        job.timeout = 604800  # one week, in seconds
        job.save()
        if action == 'requeue':
            result = qfailed.requeue(job.id)
            print(result)
print('{} jobs matched {}'.format(len(jobs_filtered), err_search))