Example 1
import json

from gearman.client import GearmanClient

# DATA (defined elsewhere in the module) maps queue names to lists of messages.

def populate_queue():
    """Push some messages into queues."""
    client = GearmanClient(['localhost:4730'])
    for queue, vals in DATA.iteritems():
        for msg in vals:
            print " >> submitting", msg, queue
            # background=True: fire-and-forget; the client does not wait for a result
            client.submit_job(queue, json.dumps(msg), background=True)
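Background submissions return no result to the caller, so after a bulk push like this the only feedback is the job server's queue status. A minimal sketch using python-gearman's admin client, assuming the same host:

from gearman.admin_client import GearmanAdminClient

admin = GearmanAdminClient(['localhost:4730'])
for entry in admin.get_status():
    # one dict per task: queued and running job counts plus available workers
    print entry['task'], entry['queued'], entry['running'], entry['workers']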
Example 2
from os import path

from gearman.client import GearmanClient

def spider(worker, job):
    """Gearman entry point."""
    url = job.data
    client = GearmanClient(['localhost'])
    # url2hrefs (defined elsewhere) yields every URL the page's content links to
    for href in url2hrefs(url):
        _, ext = path.splitext(href)
        if ext != '.json':
            href += '.json'
        # submit a new spider job for that href without blocking on its result
        client.submit_job('spider', href, background=True, wait_until_complete=False)
    return job.data
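The spider function above is only the job callback; the boilerplate that binds it to the 'spider' task name (also used by Example 4) is not shown. A plausible sketch:

from gearman.worker import GearmanWorker

worker = GearmanWorker(['localhost'])
worker.register_task('spider', spider)  # route 'spider' jobs to the callback above
worker.work()                           # blocks, processing jobs as they arrive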
Example 3
from json import loads  # assumes workers return JSON-encoded results

from gearman.client import GearmanClient

class Client(object):
    def __init__(self, config):
        self.config = config
        # 'hosts' is a comma-separated list of gearman job servers
        self.client = GearmanClient(
            self.config.get('gearman', 'hosts').split(','))

    def process_transaction(self, transaction_id, background=True):
        job = self.client.submit_job(self.config.get('gearman', 'taskname'),
                                     str(transaction_id),
                                     background=background)
        # background jobs return immediately and carry no result to decode
        if not background:
            return loads(job.result)
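A usage sketch for this wrapper, assuming an INI-style config with a [gearman] section (hypothetical host and task name):

from ConfigParser import ConfigParser
from StringIO import StringIO

config = ConfigParser()
config.readfp(StringIO('[gearman]\nhosts = localhost:4730\ntaskname = process_txn\n'))

c = Client(config)
print c.process_transaction(42, background=False)  # blocks, returns the decoded result
c.process_transaction(43)                          # fire-and-forget, returns None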
Example 4
#!/usr/bin/python

from gearman.client import GearmanClient

client = GearmanClient(['localhost'])

URL = 'http://ifcb-data.whoi.edu/feed.json'

# foreground by default: blocks until a 'spider' worker completes the seed job
client.submit_job('spider', URL)
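If the seed job should not block the submitting script, the same call takes the background flag used in Examples 1 and 2:

client.submit_job('spider', URL, background=True)  # fire-and-forget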

Example 5
import config
from worker import JSONDataEncoder  # the same encoder used in Example 6

from gearman.client import GearmanClient

def submit(url):
    gm_client = GearmanClient([config.job_server])
    gm_client.data_encoder = JSONDataEncoder
    job_req = gm_client.submit_job('worker_process_html', url)
    print url, 'is submitted'
    return job_req
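Examples 5 through 7 all assign a JSONDataEncoder to the client's data_encoder but never show the class. A minimal version, following the DataEncoder interface from the python-gearman documentation:

import json

import gearman

class JSONDataEncoder(gearman.DataEncoder):
    """Serialize job payloads to JSON on the wire."""

    @classmethod
    def encode(cls, encodable_object):
        return json.dumps(encodable_object)

    @classmethod
    def decode(cls, decodable_string):
        return json.loads(decodable_string)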
Example 6
from gearman.client import GearmanClient
from gearman.constants import JOB_UNKNOWN
import redis

from worker import JSONDataEncoder
import config

def check_request_status(job_request):
    if job_request.complete:
        print "Job %s finished!  Result: %s - %s" % (job_request.job.unique, job_request.state, job_request.result)
    elif job_request.timed_out:
        print "Job %s timed out!" % job_request.job.unique
    elif job_request.state == JOB_UNKNOWN:
        print "Job %s connection failed!" % job_request.job.unique

if __name__ == '__main__':
    gm_client = GearmanClient([config.job_server])
    gm_client.data_encoder = JSONDataEncoder
    #url = 'http://www.ttkzm.com/html/pic/'
    url = 'http://www.ttkzm.com/html/pic/2012/7/10211069.html'
    #url = 'http://www.ttkzm.com/html/VIP/1/'
    # drop any cached copy of the URL so the worker re-fetches it
    print 'clear cache before submit'
    r_client = redis.StrictRedis(host=config.redis_server, port=config.redis_port, db=0)
    r_client.delete(url)
    print 'submitting index', url
    job_req = gm_client.submit_job('worker_process_html', url)
    check_request_status(job_req)
    
Example 7
'''
@author: fengclient
'''
import sys

from gearman.client import GearmanClient
from gearman.constants import JOB_UNKNOWN

from common import JSONDataEncoder
import config

def check_request_status(job_request):
    if job_request.complete:
        print "Job %s finished!  Result: %s - %s" % (job_request.job.unique, job_request.state, job_request.result)
    elif job_request.timed_out:
        print "Job %s timed out!" % job_request.job.unique
    elif job_request.state == JOB_UNKNOWN:
        print "Job %s connection failed!" % job_request.job.unique

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print 'usage: work_and_move_out.py dirpath'
        sys.exit(-1)
    else:
        gm_client = GearmanClient([config.job_server])
        gm_client.data_encoder = JSONDataEncoder
        job_req = gm_client.submit_job('worker_process_html', sys.argv[1])
        print sys.argv[1], 'is submitted'
        check_request_status(job_req)
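The submit above is foreground, so it can block indefinitely if no worker picks the task up. submit_job also accepts a poll_timeout (seconds), after which the request is marked timed out and the timed_out branch of check_request_status becomes reachable:

job_req = gm_client.submit_job('worker_process_html', sys.argv[1],
                               poll_timeout=30.0)  # give up waiting after 30s
check_request_status(job_req)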