Exemple #1
0
 def __init__(self, session, password, styles, host_list = None):
     """Set up the worker: keep the caller-supplied state and advertise
     the session/style lookup tasks on the Gearman job servers."""
     GearmanWorker.__init__(self, host_list = host_list)
     # State handed to us by the caller.
     self.styles = styles
     self.password = password
     self.session = session
     # Register the two lookup tasks this worker serves.
     for task_name, handler in (("get_mapworker_session", self.getSession),
                                ("get_mapworker_styles", self.getStyles)):
         self.register_task(task_name, handler)
class ImageRecognitionWorker:
    """Gearman worker that matches a query image (by hue histogram)
    against labelled reference histograms stored in the database."""

    worker = None  # GearmanWorker connection
    db = None      # DBInterface handle

    def __init__(self):
        # Connect to the job server and advertise the 'imgrecon' task.
        self.worker = GearmanWorker(['gearman.emag-bot.com'])
        self.worker.register_task('imgrecon', self.ImageRecognition)

        self.db = DBInterface()

    def Work(self):
        """Serve jobs forever."""
        self.worker.work()

    def ComputeHistogram(self, url):
        """Load the image at *url*, isolate its main contour and return
        the hue histogram of the extracted region."""
        ima = ImageAnalyser()
        ima.LoadImage(url, PathLocation.ONLINE)
        contour = ima.FindContour()
        im_e = ima.ExtractContour(contour)
        hist = ima.GetHueHistogram(im_e)

        return hist

    def ImageRecognition(self, worker, job):
        """Job handler: job.data is JSON {'url': ..., 'labels': [...]}.

        Returns a JSON list of candidate labels ordered by decreasing
        histogram correlation with the query image.
        """
        print("Got job: " + job.data)
        data = json.loads(job.data)
        hist = self.ComputeHistogram(data['url'])

        db_entries = self.db.QueryForLabels(data['labels'])

        # accepted_entries[0] holds scores, accepted_entries[1] the
        # matching labels, kept in lockstep by index.
        # (Fix: removed an unused counter variable that was never read.)
        accepted_entries = [[], []]
        for row in db_entries:
            # row[3] is the stored histogram (JSON); skip rows without one.
            if row[3] is None or row[3] == '':
                continue

            # Fix: use a distinct name so the parsed job payload ('data')
            # is no longer shadowed inside the loop.
            stored = json.loads(row[3])
            stored = np.array([[d] for d in stored], dtype=np.float32)
            res = cv2.compareHist(hist, stored, cv2.HISTCMP_CORREL)

            # Keep only the best score seen for each label (row[4]).
            if row[4] in accepted_entries[1]:
                idx = accepted_entries[1].index(row[4])
                if res > accepted_entries[0][idx]:
                    accepted_entries[0][idx] = res
            else:
                accepted_entries[0].append(res)
                accepted_entries[1].append(row[4])

        # Sort labels by score, best match first.
        ret = [
            x for _, x in sorted(zip(accepted_entries[0], accepted_entries[1]),
                                 reverse=True)
        ]
        print(ret)

        return json.dumps(ret)
    def gearmanWorker(self):
        """Run a Gearman worker until the surrounding loop stops.

        Each pass (re)connects to the configured job servers, registers
        the consume callback on our queue and blocks in work(); any
        failure is logged and retried after a short pause.
        """
        self.logging.info("Gearmand worker instance started")
        while self.loop():
            try:
                connection = GearmanWorker(self.hostlist)
                connection.register_task(self.queue, self.consume)
                connection.work()
            except Exception as err:
                self.logging.warn('Connection to gearmand failed. Reason: %s. Retry in 1 second.' % err)
                sleep(1)
    def _gearmanWorkerNotPatched(self):
        """Consume jobs from Gearmand using the stock gearman module.

        Reconnects in a loop for as long as the actor is running; the
        worker instance is shut down after each pass so sockets are not
        leaked between reconnect attempts.
        """
        self.logging.info("Gearmand worker instance started")
        while self.loop():
            try:
                self.worker_instance = GearmanWorker(self.kwargs.hostlist)
                self.worker_instance.register_task(self.kwargs.queue,
                                                   self.consume)
                self.worker_instance.work()
            except Exception as err:
                self.logging.warn(
                    "Connection to gearmand failed. Reason: '%s'. Retry in 1 second."
                    % err)
                sleep(1)
            finally:
                # Bug fix: GearmanWorker() itself can raise before the
                # attribute is set; guard so the finally block does not
                # mask the real error with an AttributeError.
                if getattr(self, "worker_instance", None) is not None:
                    self.worker_instance.shutdown()
Exemple #5
0
    def __init__(self):
        """Create the Gearman worker and register every job handler."""
        super(GEWorker, self).__init__()
        self.worker = GearmanWorker(settings.GEARMAN_SERVERS)

        # Map each message subject to its handler in one place.
        handlers = (
            (messaging.SUBJECT_THREAD_COUNT, self.thread_count),
            (messaging.SUBJECT_THREAD_NAMES, self.thread_names),
            (messaging.SUBJECT_AVG, self.average),
        )
        for subject, callback in handlers:
            self.worker.register_function(subject, callback)

        self._set_transaction_isolation()
Exemple #6
0
class TestGearman(GearmanTestCase):
    """Integration tests driving a real Gearman server, a threaded worker
    and a client in the same process."""

    def setUp(self):
        """Start a server, a worker (in a background thread) and a client."""
        self.start_server()
        # (func_name, exception) of the last job that failed on the worker.
        self.last_exception = (None, None)
        self.worker = GearmanWorker(job_servers)
        self.worker.register_function("echo", echo)
        self.worker.register_function("fail", fail)
        # 'sleep' gets a 1 second server-side timeout (see testTimeout).
        self.worker.register_function("sleep", sleep, timeout=1)
        self.worker.register_class(ObjectWorker())
        self.worker.register_class(ClassWorker())
        # Worker lifecycle hooks; only 'fail' records anything.
        class Hooks(object):
            @staticmethod
            def start(job):
                pass
            @staticmethod
            def complete(job, res):
                pass
            @staticmethod
            def fail(job, exc):
                self.last_exception = (job.func, exc)

        import thread  # Python 2 low-level threading module
        self.worker_thread = thread.start_new_thread(self.worker.work, tuple(), dict(hooks=Hooks)) # TODO: Shouldn't use threads.. but we do for now (also, the thread is never terminated)
        self.client = GearmanClient(job_servers)

    def tearDown(self):
        """Drop worker/client references and stop the server."""
        del self.worker
        del self.client
        self.stop_server()

    def testComplete(self):
        # Round-trip: the echo task returns its input unchanged.
        self.failUnlessEqual(self.client.do_task(Task("echo", "bar")), 'bar')

    def testFail(self):
        # A worker-side exception surfaces as TaskFailed on the client.
        self.failUnlessRaises(self.client.TaskFailed, lambda:self.client.do_task(Task("fail", "bar")))
        # self.failUnlessEqual(self.last_exception[0], "fail")

    def testCompleteAfterFail(self):
        # A failure must not wedge the worker; the next task still works.
        self.failUnlessRaises(self.client.TaskFailed, lambda:self.client.do_task(Task("fail", "bar")))
        self.failUnlessEqual(self.client.do_task(Task("echo", "bar")), 'bar')

    def testTimeout(self):
        # Under the 1s limit succeeds; over it fails with TaskFailed.
        self.failUnlessEqual(self.client.do_task(Task("sleep", "0.1")), '0.1')
        self.failUnlessRaises(self.client.TaskFailed, lambda:self.client.do_task(Task("sleep", "1.5")))

    def testCall(self):
        # Calling the client directly behaves like do_task on an echo job.
        self.failUnlessEqual(self.client("echo", "bar"), 'bar')

    def testObjectWorker(self):
        self.failUnlessEqual(self.client("ObjectWorker.echo", "foo"), "foo")

    def testClassWorker(self):
        self.failUnlessEqual(self.client("ClassWorker.echo", "foo"), "foo")
def startFetchDependencyInfoWorkers():
    """Fork worker process(es) serving the fetch-dependency-info task.

    Any previously running worker is stopped first.  Each forked child
    appends its pid to the shared pid file and then blocks forever in
    GearmanWorker.work(); only the parent returns from this function.
    """
    import FetchDependencyInfoWorker

    stopFetchDependencyInfoWorker()

    for _ in range(1):  # number of worker processes to fork
        time.sleep(1)
        result = os.fork()

        if result == 0:
            # Child: record our pid so stopFetchDependencyInfoWorker()
            # can find and kill us later.
            workerPid = os.getpid()

            # Fix: use a context manager so the pid file handle is closed
            # even if the write fails (the original leaked it on error).
            with open(FetchDependencyInfoWorkerPidFilePath, "a") as fp:
                fp.write(" %s" % workerPid)

            worker = GearmanWorker([GearmanConfig.gearmanConnection])
            worker.register_task(JobList.Job_fetchDependencyInfo, FetchDependencyInfoWorker.doWork)
            worker.work()  # never returns
Exemple #8
0
    def _init(self):
        """Build and return a connected GearmanWorker.

        Blocks (via gevent.sleep) until Gearman reports itself available,
        then retries the connection indefinitely, so callers always
        receive a usable worker instance.
        """
        while True:
            # Wait until the server is reachable at all before trying.
            while not self.is_available():
                logging.error(
                    "Gearman not available right now. Demon will sleep during {n} seconds"
                    .format(n=settings.GEARMAN_RECONNECT_TIMEOUT))
                gevent.sleep(settings.GEARMAN_RECONNECT_TIMEOUT)

            logging.debug("Gearman worker try to connect {hosts}".format(
                hosts=', '.join(settings.GEARMAN['hosts'])))

            try:
                gm_worker = GearmanWorker(settings.GEARMAN['hosts'])
                # Client id embeds our port so workers are distinguishable.
                gm_worker.set_client_id("socket_io_gearman_" +
                                        str(OPTIONS.port))
                gm_worker.register_task("socket_io", GearmanListener.callback)
                logging.debug("Gearman worker was successfull created")

                return gm_worker
            except Exception, e:  # Python 2 except syntax; file predates py3
                logging.error(
                    "Error while initiation gearman worker connect with message: {message}"
                    .format(message=e.message))
                logging.debug("Demon will be sleep during {n} seconds".format(
                    n=settings.GEARMAN_RECONNECT_TIMEOUT))
                gevent.sleep(settings.GEARMAN_RECONNECT_TIMEOUT)
    def _gearmanWorkerNotPatched(self):
        """Connect-and-serve loop using the stock (unpatched) gearman
        module; reconnects one second after any failure."""
        self.logging.info("Gearmand worker instance started")
        while self.loop():
            try:
                self.worker_instance = GearmanWorker(self.kwargs.hostlist)
                self.worker_instance.register_task(self.kwargs.queue, self.consume)
                self.worker_instance.work()
            except Exception as err:
                self.logging.warn("Connection to gearmand failed. Reason: '%s'. Retry in 1 second." % err)
                sleep(1)
            finally:
                # Bug fix: if GearmanWorker() raised, worker_instance may
                # never have been set; guard to avoid an AttributeError
                # that would hide the original failure.
                if getattr(self, "worker_instance", None) is not None:
                    self.worker_instance.shutdown()
Exemple #10
0
class GEWorker(daemon.Daemon):
    "Our worker daemon - takes Gearman jobs and does them"

    def __init__(self):
        super(GEWorker, self).__init__()
        self.worker = GearmanWorker(settings.GEARMAN_SERVERS)

        # One handler per message subject.
        self.worker.register_function(messaging.SUBJECT_THREAD_COUNT, self.thread_count)
        self.worker.register_function(messaging.SUBJECT_THREAD_NAMES, self.thread_names)

        self.worker.register_function(messaging.SUBJECT_AVG, self.average)

        self._set_transaction_isolation()

    def _set_transaction_isolation(self):
        """
        Default transaction isolation on MySQL InnoDB is REPEATABLE-READ, which means
        a connection always gets the same result for a given query, even if the data has
        changed.
        This daemon runs for a long time, we need to see db changes, so change to READ-COMMITTED.
        """
        cur = connection.cursor()
        cur.execute("set session transaction isolation level read committed")
        cur.close()

    def run(self):
        """Entry point for daemon.Daemon subclasses - main method"""
        self.worker.work()  # Never returns

    def wait_for_thread(self):
        'Sleeps until a thread is available. Gearman will queue the requests whilst we pause here'

        if settings.DEBUG:
            connection.queries = []  # Prevent query list growing indefinitely

        # Bug fix: re-read the active thread count on every pass.  The
        # original sampled threading.active_count() once before the loop,
        # so once the limit was reached the loop could never exit even
        # after worker threads finished.
        while threading.active_count() >= settings.MAX_WORKER_THREADS:
            logging.debug(
                    'Waiting for thread. Currently %s: %s' %
                    (threading.active_count(), self.thread_names()))
            time.sleep(1)

    def thread_count(self, job):
        'Returns number of threads currently running'
        return str(threading.active_count()) + '\n'

    def thread_names(self, job):
        'Returns an array of active thread names'
        return str(', '.join([thread.name for thread in threading.enumerate()])) + '\n'

    def average(self, job):
        """ Calculates daily indicator average for group and overall
        @job.arg Id of the new Answer to include. 
        """
        # Throttle: block until a worker thread slot is free.
        self.wait_for_thread()
        worker_thread = AverageWorker(job.arg)
        worker_thread.start()
Exemple #11
0
class RequestWorker:
    """Gearman worker serving the 'getrawdata' task: given an image
    location it returns the hue histogram of the image's main contour
    as a JSON list."""

    worker = None  # GearmanWorker connection

    def __init__(self):
        self.worker = GearmanWorker(['gearman.emag-bot.com'])
        self.worker.register_task('getrawdata', self.GetRawData)

    def Work(self):
        """Serve jobs forever."""
        self.worker.work()

    def GetRawData(self, worker, job):
        """Job handler: job.data is the image location to analyse."""
        print("Got job: " + job.data)

        analyser = ImageAnalyser()
        analyser.LoadImage(job.data, PathLocation.ONLINE)
        extracted = analyser.ExtractContour(analyser.FindContour())
        histogram = analyser.GetHueHistogram(extracted)

        return json.dumps(histogram.flatten().tolist())
Exemple #12
0
    def __init__(self, host_list=None, styles=None, stop=None, max_jobs=1000, lifetime=3600):
        """Create a tile-rendering worker.

        host_list -- Gearman job servers to connect to.
        styles    -- mapping of style name -> mapnik XML stylesheet path;
                     one render_<style> task is registered per entry.
        stop      -- external stop flag (semantics defined by caller).
        max_jobs  -- job budget tracked against self.done.
        lifetime  -- maximum age (seconds) measured from self.started.
        """
        GearmanWorker.__init__(self, host_list)

        # Fix: avoid the mutable default argument ({}) that was shared
        # across every call; None is substituted with a fresh empty dict.
        if styles is None:
            styles = {}

        self.maps = {}
        self.prj = {}
        self.stop = stop
        self.max_jobs = max_jobs
        self.done = 0
        self.lifetime = lifetime
        self.started = time.time()

        # Projects between tile pixel co-ordinates and LatLong (EPSG:4326)
        self.tileproj = SphericalProjection(MAX_ZOOM)

        for style in styles:
            logging.debug("Creating map object for %s using %s" % (style, styles[style]))
            m = mapnik.Map(TILE_SIZE, TILE_SIZE)
            self.maps[style] = m
            # Load XML style
            mapnik.load_map(m, styles[style], True)
            # Obtain <Map> projection
            self.prj[style] = mapnik.Projection(m.srs)
            self.register_task("render_" + style, render_job)
Exemple #13
0
    def setUp(self):
        """Start a server, a worker (in a background thread) and a client."""
        self.start_server()
        # (func_name, exception) of the last job that failed on the worker.
        self.last_exception = (None, None)
        self.worker = GearmanWorker(job_servers)
        self.worker.register_function("echo", echo)
        self.worker.register_function("fail", fail)
        # 'sleep' gets a 1 second server-side timeout.
        self.worker.register_function("sleep", sleep, timeout=1)
        self.worker.register_class(ObjectWorker())
        self.worker.register_class(ClassWorker())
        # Worker lifecycle hooks; only 'fail' records anything.
        class Hooks(object):
            @staticmethod
            def start(job):
                pass
            @staticmethod
            def complete(job, res):
                pass
            @staticmethod
            def fail(job, exc):
                self.last_exception = (job.func, exc)

        import thread  # Python 2 low-level threading module
        self.worker_thread = thread.start_new_thread(self.worker.work, tuple(), dict(hooks=Hooks)) # TODO: Shouldn't use threads.. but we do for now (also, the thread is never terminated)
        self.client = GearmanClient(job_servers)
Exemple #14
0
def main():
    """Entry point: configure caching, log the runtime configuration and
    run a Gearman worker serving the email/image/echo tasks."""
    print("Running Worker .....")
    print("options.logging = %s" % options.logging)
    from demisaucepy import cache_setup
    cache_setup.load_cache()
    # Dump the effective configuration for operators.
    logging.info("site_root = %s" % options.site_root)
    logging.info("smtp servers = %s" % options.smtp_server)
    logging.info("cache servers = %s" % options.memcached_servers)
    logging.info("gearman servers 2 = %s" % (options.gearman_servers))
    logging.error("where does this go in supervisord?")
    gm_worker = GearmanWorker(options.gearman_servers)
    gm_worker.register_function("email_send", emailer.email_send)
    gm_worker.register_function("image_resize", assets.image_resize)
    gm_worker.register_function("echo", echo)

    from demisauce.model import actions
    actions.register_workers(gm_worker)
    gm_worker.work()
Exemple #15
0
    def gearmanWorker(self):
        """Keep a Gearman worker alive: connect, register the queue
        callback and serve jobs; on failure, log and retry after a
        one-second pause."""
        self.logging.info("Gearmand worker instance started")
        while self.loop():
            try:
                gm = GearmanWorker(self.hostlist)
                gm.register_task(self.queue, self.consume)
                gm.work()
            except Exception as err:
                self.logging.warn(
                    'Connection to gearmand failed. Reason: %s. Retry in 1 second.'
                    % err)
                sleep(1)
Exemple #16
0
 def setUp(self):
     """Start a worker in a background thread and a client against the
     shared job servers."""
     # (func_name, exception) of the last job that failed on the worker.
     self.last_exception = (None, None)
     self.worker = GearmanWorker(job_servers)
     self.worker.register_function("echo", echo)
     self.worker.register_function("fail", fail)
     # 'sleep' gets a 1 second server-side timeout.
     self.worker.register_function("sleep", sleep, timeout=1)
     # Worker lifecycle hooks; only 'fail' records anything.
     class Hooks(object):
         @staticmethod
         def start(job):
             pass
         @staticmethod
         def complete(job, res):
             pass
         @staticmethod
         def fail(job, exc):
             self.last_exception = (job.func, exc)
     import thread  # Python 2 low-level threading module
     thread.start_new_thread(self.worker.work, tuple(), dict(hooks=Hooks)) # TODO: Shouldn't use threads.. but we do for now (also, the thread is never terminated)
     self.client = GearmanClient(job_servers)
Exemple #17
0
#
# @author Yuan JIN
# @contact [email protected]
# @created May 16, 2012
# @updated May 18, 2012
#

##
# imports and CONSTANTS
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')

from gearman import GearmanWorker
GEARMAN_HOST = '127.0.0.1:4730'
gworker = GearmanWorker([GEARMAN_HOST])

from pymongo.connection import Connection
MONGO_CON = '127.0.0.1:27017'
mongo_con = Connection(MONGO_CON)
from pymongo.database import Database
MONGO_DB = 'note-keeper'
mongo_db = Database(mongo_con, MONGO_DB)
from pymongo.collection import Collection
MONGO_COL = 'text'
mongo_col = Collection(mongo_db, MONGO_COL)


def parse(bundle):
    '''find out information in the bundle'''
    data = bundle.split('||')
from gearman import GearmanWorker

def speak(job):
    """Gearman handler: print and return a greeting built from job.arg."""
    r = 'Hello %s' % job.arg
    print r
    return r

worker = GearmanWorker(["127.0.0.1"])
worker.register_function('speak', speak, timeout=3)
worker.work()
        return False


def task_listener(gearman_worker, gearman_job):
    """Dispatch a segment job.

    job data is a pickled (task_name, video_id, segment_id) tuple; returns
    the pickled handler result (False when the task name is unknown).

    NOTE(review): pickle.loads on job data executes arbitrary payloads if
    untrusted clients can reach this queue -- confirm the queue is private.
    """
    task_name, video_id, segment_id = pickle.loads(gearman_job.data)
    result = False

    if task_name == 'transcode':
        result = transcode_segment(video_id, segment_id)
    elif task_name == 'thumbnail':
        result = generate_thumbnail(video_id, segment_id)

    return pickle.dumps(result)


if __name__ == "__main__":
    # worker run

    logger.info("Setting up the worker.")
    gm_worker = GearmanWorker([GEARMAND_HOST_PORT])
    gm_worker.register_task(SEGMENT_TASK_NAME, task_listener)

    try:
        logger.info("Worker was set up successfully. Waiting for work.")
        gm_worker.work()

    # Ctrl-C is the expected way to stop: shut down cleanly.
    except KeyboardInterrupt:
        gm_worker.shutdown()
        logger.info("Worker has shut down successfully. Bye.")
Exemple #20
0
 def __init__(self, host, queue):
     """Connect the worker to *host* and remember which queue it serves."""
     GearmanWorker.__init__(self, host)
     self.queue = queue
    
    jenkins_data = simplejson.loads(job.data)
    try:
        myj = Jenkins(jenkins_data['url'])
        job = myj.get_job(jenkins_data['job_id'])
        #job.invoke(securitytoken=token, block=block)
        job.invoke(invoke_pre_check_delay=0)
    except:
        rev="Not Happy!!!" 
    
    return rev

	
# Establish a connection with the job server on localhost--like the client,
# multiple job servers can be used.
worker = GearmanWorker(['localhost'])

# register_task will tell the job server that this worker handles the "echo"
# task
worker.set_client_id('your_worker_client_id_name')
worker.register_task('echo', task_listener_echo)
worker.register_task('build:pep8', task_listener_build)
worker.register_task('stop:jenkins_master.hp.com', task_listener_stop)
worker.register_task('bravo', task_listener_echo)
worker.register_task('reverse', task_listener_reverse)
worker.register_task('jenkins_invoke_job', task_listener_jenkins_invoke_job)

# Once setup is complete, begin working by consuming any tasks available
# from the job server
print 'working...'
worker.work()
Exemple #22
0
 def __init__(self, host, queue):
     """Initialise the underlying GearmanWorker on *host* and record the
     queue name this worker will consume from."""
     GearmanWorker.__init__(self, host)
     self.queue = queue
Exemple #23
0
        time.sleep(10)
        print('waiting 10s... ')
        status = i.update()
    if status == 'running':
        print('running adding tag... ')
        import hashlib
        conn.create_tags([i.id],
                         {"name": "ScrambleDB" + random_md5like_hash()})
        # i.add_tag("Name","{{ScambleDB}}")

    else:
        print('Instance status: ' + status)

    #     security_groups=[ config["cloud"]["security_groups"]])

    return json.dumps(reservation)


# Establish a connection with the job server on localhost--like the client,
# multiple job servers can be used.
worker = GearmanWorker(['127.0.0.1:4731'])

# register_task will tell the job server that this worker handles the "echo"
# task
worker.register_task('cloud_cmd', cloud_cmd)

# Once setup is complete, begin working by consuming any tasks available
# from the job server
print 'working...'
worker.work()
 def handle(self, *args, **options):
   # Entry point (the handle(*args, **options) signature matches a Django
   # management command -- confirm).  Runs a local Gearman worker serving
   # the 'download' task; work() blocks forever.
   print "worker started"
   worker = GearmanWorker(["127.0.0.1"])
   worker.register_function("download", download)
   worker.work()
Exemple #25
0
    if file_id is not None:
        newfile = MEDIA_DIRECTORY + file_id
    else:
        newfile = MEDIA_DIRECTORY + gearman_job.data

    print "TASK RECEIVED FOR %s" % newfile  # @TODO timestamp

    # CONVERT TO WEBM
    cmd = "avconv -threads auto -i %s.mp4 -c:v libvpx -crf 10 \
           -b:v 768K -c:a libvorbis -deadline realtime \
           -cpu-used -10 %s.webm" % (newfile, newfile)
    cmd = cmd.encode('utf-8')

    result = os.system(cmd)

    if result != 0:
        print "TASK FAILURE"  # @TODO timestamp
        return "ERROR"  # @TODO return something more specific to the client

    os.chmod(newfile + ".webm", 0775)

    print "TASK COMPLETE"  # @TODO timestamp
    return "COMPLETE"  # @TODO return something more specific to the client


if not app.config.get('TESTING'):
    from gearman import GearmanWorker
    worker = GearmanWorker(GEARMAN_SERVERS)
    worker.register_task("generate_webm", generate_webm)
    worker.work()
class GearmanIn(Actor):

    '''**Consumes events/jobs from  Gearmand.**

    Consumes jobs from a Gearmand server.
    When secret is none, no decryption is done.


    Parameters:

        - hostlist(list)(["localhost:4730"])
           |  A list of gearmand servers.  Each entry should have
           |  format host:port.

        - secret(str)(None)
           |  The AES encryption key to decrypt Mod_gearman messages.

        - workers(int)(1)
           |  The number of gearman workers within 1 process.

        - queue(str)(wishbone)
           |  The queue to consume jobs from.

        - enable_keepalive(bool)(False)
           |  Attempt to monkey patch the gearmand module to enable socket
           |  keepalive.


    Queues:

        - outbox:   Outgoing events.

    '''

    def __init__(self, actor_config, hostlist=["localhost:4730"], secret=None, workers=1, queue="wishbone", enable_keepalive=False):
        # NOTE(review): hostlist uses a mutable default; it is only ever
        # read here (captured by the framework as self.kwargs), so it is
        # deliberately left unchanged to keep the interface identical.
        Actor.__init__(self, actor_config)

        self.pool.createQueue("outbox")
        self.background_instances = []

        # Choose the decryption strategy once at construction time.
        if self.kwargs.secret is None:
            self.decrypt = self.__plainTextJob
        else:
            # Zero-pad the configured secret to a 32-byte AES key.
            key = self.kwargs.secret[0:32]
            self.cipher = AES.new(key + chr(0) * (32 - len(key)))
            self.decrypt = self.__encryptedJob

    def preHook(self):
        """Pick the worker implementation (keepalive-patched or stock) and
        start the configured number of workers plus the connection monitor."""
        if self.kwargs.enable_keepalive:
            self.logging.info("Requested to monkey patch Gearmand")
            if gearman_version == "2.0.2":
                self.logging.info("Detected gearman version 2.0.2, patching sockets with SO_KEEPALIVE enabled.")
                self.gearmanWorker = self._gearmanWorkerPatched
            else:
                self.logging.warning("Did not detect gearman version 2.0.2. Not patching , patching sockets with keepalive enabled.")
                self.gearmanWorker = self._gearmanWorkerNotPatched
        else:
            self.gearmanWorker = self._gearmanWorkerNotPatched

        for _ in range(self.kwargs.workers):
            self.sendToBackground(self.gearmanWorker)

        self.sendToBackground(self.monitor)

    def consume(self, gearman_worker, gearman_job):
        """Gearman callback: decrypt the payload, emit it as an event on
        'outbox' and return the raw job data to the server."""
        decrypted = self.decrypt(gearman_job.data)
        event = Event(decrypted)
        self.submit(event, self.pool.queue.outbox)
        return gearman_job.data

    def __encryptedJob(self, data):
        # Payloads arrive as base64-wrapped AES ciphertext.
        return self.cipher.decrypt(base64.b64decode(data))

    def __plainTextJob(self, data):
        # No secret configured: pass the payload through untouched.
        return data

    def _gearmanWorkerPatched(self):
        """Worker loop with client sockets monkey patched (keepalive)."""
        self.logging.info("Gearmand worker instance started")
        while self.loop():
            try:
                with mock.patch.object(GearmanConnection, '_create_client_socket', create_client_socket):
                    self.worker_instance = GearmanWorker(self.kwargs.hostlist)
                    self.worker_instance.register_task(self.kwargs.queue, self.consume)
                    self.worker_instance.work()
            except Exception as err:
                self.logging.warn("Connection to gearmand failed. Reason: '%s'. Retry in 1 second." % err)
                sleep(1)
            finally:
                # Bug fix: GearmanWorker() can raise before the attribute
                # exists; guard so the finally block does not raise its
                # own AttributeError and hide the original failure.
                if getattr(self, "worker_instance", None) is not None:
                    self.worker_instance.shutdown()

    def _gearmanWorkerNotPatched(self):
        """Worker loop using the stock gearman module."""
        self.logging.info("Gearmand worker instance started")
        while self.loop():
            try:
                self.worker_instance = GearmanWorker(self.kwargs.hostlist)
                self.worker_instance.register_task(self.kwargs.queue, self.consume)
                self.worker_instance.work()
            except Exception as err:
                self.logging.warn("Connection to gearmand failed. Reason: '%s'. Retry in 1 second." % err)
                sleep(1)
            finally:
                # Bug fix: same guard as in the patched variant.
                if getattr(self, "worker_instance", None) is not None:
                    self.worker_instance.shutdown()

    def monitor(self):
        """Background loop: every 5 seconds check each worker connection
        and try to reconnect dead ones.

        NOTE(review): assumes self.worker_instance exists by the time the
        first check runs -- confirm the startup ordering.
        """
        self.logging.info("Connection monitor started.")
        while self.loop():
            sleep(5)
            for conn in self.worker_instance.connection_list:
                if not conn.connected:
                    self.logging.error("Connection to '%s' is dead.  Trying to reconnect." % (conn.gearman_host))
                    try:
                        conn.connect()
                        self.logging.info("Connection to '%s' is restored." % (conn.gearman_host))
                    except Exception as err:
                        self.logging.error("Failed to reconnect to '%s'. Retry in 5 seconds. Reason: '%s'" % (conn.gearman_host, err))
                else:
                    self.logging.debug("Connection to '%s' is alive." % (conn.gearman_host))
 # work function for gearman
def preprocess_gm( job ):
    """Gearman handler: wrap the job in GmJob, run it and return the
    finished result.

    NOTE(review): if GmJob/run/finish raises, 'result' is never bound and
    the final 'return result' raises NameError -- flagged only, since this
    edit is documentation-only.
    """
    try:
        job.status(0,0)
        myjob = GmJob( job )    # Gearman job
        myjob.run()
        result = myjob.finish()
    except Exception,e:
        print e
    print 'worker finished job:%s' % job.handle
    print '-'*80
    return result 

def preprocess_sa():
    """Run one standalone (non-Gearman) preprocessing job, dump its result
    to tmp.txt and return it."""
    myjob = SaJob()         # standalone job
    myjob.run()
    result = myjob.finish()
    # Fix: close the file deterministically; the original never closed the
    # handle, risking buffered data loss on interpreter exit.
    with open('tmp.txt', 'w') as wf:
        wf.write(result)
    return result
    
if __name__ == '__main__':
    # With any CLI argument run one standalone job; otherwise serve
    # 'crawl' jobs from the Gearman server at 10.61.0.145 forever.
    if len(sys.argv)>1:
        preprocess_sa()
    else:
        worker = GearmanWorker( ['10.61.0.145'] )
        print "worker started."
        worker.register_function( 'crawl',preprocess_gm )
        worker.work()
Exemple #28
0
def main():
    """Register every spider task on the local Gearmand and serve jobs."""
    spider_tasks = (
        ("weibo_spider", weibo),
        ("bbs_spider", bbs),
        ("news_spider", news),
        ("blog_spider", blog),
        ("media_spider", media),
        ("class_spider", same_class),
    )
    gw = GearmanWorker(['127.0.0.1:4730'])
    for task_name, handler in spider_tasks:
        gw.register_task(task_name, handler)
    gw.work()
Exemple #29
0
 def __init__(self):
     """Create the worker connection and expose the 'getrawdata' task."""
     gm = GearmanWorker(['gearman.emag-bot.com'])
     gm.register_task('getrawdata', self.GetRawData)
     self.worker = gm
# coding=utf-8

from gearman import GearmanWorker
from FromMongo import data_cluster
import json

def task_callback(gearman_worker,job):
    """Gearman handler: job.data is a JSON clustering request.

    Expected keys: from_col, fill_type, attr_weight, needtransfer,
    Parameters.  The clustering runs for its side effects; a simple
    acknowledgement string is returned to the client.
    """
    request = json.loads(job.data)
    from_col = request['from_col']
    fill_type = request['fill_type']
    attr_weight = request['attr_weight']
    needtransfer = request['needtransfer']
    Parameters = request['Parameters']
    # Fix: the result was bound to an unused local ('demo'); the call is
    # kept purely for its side effects.
    data_cluster(from_col,fill_type,attr_weight,needtransfer,Parameters)
    return 'successful received'

new_worker = GearmanWorker(['192.168.5.41:4730'])
new_worker.register_task("data_cluster", task_callback)
new_worker.work()
Exemple #31
0
        return False


def task_listener(gearman_worker, gearman_job):
    task_name, video_id, segment_id = pickle.loads(gearman_job.data)
    result = False

    if task_name == 'transcode':
        result = transcode_segment(video_id, segment_id)
    elif task_name == 'thumbnail':
        result = generate_thumbnail(video_id, segment_id)

    return pickle.dumps(result)


if __name__ == "__main__":
    # worker run

    logger.info("Setting up the worker.")
    gm_worker = GearmanWorker([GEARMAND_HOST_PORT])
    gm_worker.register_task(SEGMENT_TASK_NAME, task_listener)

    try:
        logger.info("Worker was set up successfully. Waiting for work.")
        gm_worker.work()

    except KeyboardInterrupt:
        gm_worker.shutdown()
        logger.info("Worker has shut down successfully. Bye.")
Exemple #32
0
        job_result = function_callback(self, current_job)
    except Exception:
        return self.on_job_exception(current_job, sys.exc_info())
    self.task_count +=1
    if self.task_count >= 50:
        print "max tasks reached. exiting"
        sys.exit()

    return self.on_job_complete(current_job, job_result)


GearmanWorker.task_count = 0
GearmanWorker.on_job_exception = on_job_exception
GearmanWorker.on_job_execute = on_job_execute

worker = GearmanWorker(JOBSERVER_LIST)

worker.set_client_id("working_on_the_djangoroad")

worker.register_task("add", add)
worker.register_task("pre_schedule", pre_schedule)
worker.register_task("row_calculator", row_calculator)
worker.register_task("row_aggregator", row_aggregator)

worker.register_task("echo", echo)


print "working"
#print dir(worker)
#print worker.worker_abilities
worker.work()
 def handle(self, *args, **options):
     # Entry point (the handle(*args, **options) signature matches a
     # Django management command -- confirm).  Starts a local Gearman
     # worker for the 'download' task; work() never returns.
     print "worker started"
     worker = GearmanWorker(["127.0.0.1"])
     worker.register_function("download", download)
     worker.work()
Exemple #34
0
class Alligator(object):
    def __init__(self):
        config = json.load(open('config.json','r'))
        self.gm_worker = GearmanWorker([  config['gearmanip']+':'+str(config['gearmanport']) ])
        self.gm_worker.register_task(str(config["gearmanworker_apiai"]),self.run)
        self.wolframkey = config["wolfram_key"]
    def run(self,gearman_worker,gearman_job):
        result,lastresult = "",[]
        try:
            request = json.loads(gearman_job.data)
            session = request['uId']
            channel = request['channel']
            query = request['query']
            print "QUERY : ",query
            language = 'en'
            apirequest = "http://api.wolframalpha.com/v2/query?input="+query+"&appid="+self.wolframkey+"&format=image"
            value = requests.get(apirequest)
            tags = ET.fromstring(value._content)
            imagelistings = []
            for each in tags:
                if 'title' in each.attrib:
                    #print 'title : ',each.attrib['title']
                    if each.attrib['title'].lower().strip() == "result".lower():
                        text = each.find('subpod')
                        for all in text:
                            #print "all : ",all.attrib
                            if 'title' in all.attrib:
                                if channel == 'ui':
                                    result = result + all.attrib['title'] + "<br>"
                                else:
                                    result = result + all.attrib['title'] + "\n"
                                imagelistings.append(all.attrib['src'])
                        break
                    elif "input" not in each.attrib['title'].lower().strip():
                        text = each.find('subpod')
                        for all in text:
                            #print "all : ",all.attrib
                            if 'title' in all.attrib:
                                if all.attrib['title'].strip():
                                    print all.attrib
                                    lastresult.append((each.attrib['title'],all.attrib['title']))
                                if 'src' in all.attrib:
                                    print "title :",each.attrib['title']
                                    imagelistings.append(all.attrib['src'])
                else:
                    print "attribute without title : ",each.attrib
            imageflag = False
            print "RESULT : ",result
            if not result.strip():
                if channel == 'ui':
                    if lastresult:
                        print "in lastresult"
                        currenttitle = ""
                        for each,all in lastresult:
                            if each != currenttitle:
                                currenttitle = each
                                result = result + "<br><b><u> "+each+": </u></b>"
                            result = result + "<br> "+all
                    elif imagelistings:
                        imageflag = True
                        print "in imagelistings",imagelistings
                        for each in imagelistings:
                            result = result + '<img src = "'+each+'" /> <br>'
                    else:
                        result = self.randomResponses()
                else:
                    if lastresult:
                        print "in lastresult"
                        currenttitle = ""
                        for each,all in lastresult:
                            if each != currenttitle and each.lower().strip() != "response":
                                currenttitle = each
                                result = result + "\n"+each+": "
                            result = result + "\n \t "+all
                    else:
                        result = self.randomResponses()
            print "RESULT : ",result
            if "data not available" in result:
                result = self.randomResponses()
            if "wolfram" in result.lower() and not imageflag:
                if "stephen" not in result.lower():
                    resultlist = result.split()
                    for each,value in enumerate(resultlist):
                        print each,value
                        if 'wolfram' in resultlist[each].lower():
                            resultlist[each] = "Alligator"
                            result = " ".join(resultlist)
                else:
                    result = result.replace("Stephen Wolfram","Kannan Piedy")
            #result = '<img src = "http://localhost:7000/image/logo.png" />'
            return json.dumps({'result':result,'sessionId':channel+'_'+session})
        except Exception,e:
            print "Exception in Run : ",e
Exemple #35
0
        newfile = MEDIA_DIRECTORY + file_id
    else:
        newfile = MEDIA_DIRECTORY + gearman_job.data

    print "TASK RECEIVED FOR %s" % newfile # @TODO timestamp

    # CONVERT TO WEBM
    cmd = "avconv -threads auto -i %s.mp4 -c:v libvpx -crf 10 \
           -b:v 768K -c:a libvorbis -deadline realtime \
           -cpu-used -10 %s.webm" % (newfile, newfile)
    cmd = cmd.encode('utf-8')

    result = os.system(cmd)
    
    if result != 0:
        print "TASK FAILURE" # @TODO timestamp
        return "ERROR" # @TODO return something more specific to the client
                
    os.chmod(newfile + ".webm", 0775)

    print "TASK COMPLETE" # @TODO timestamp
    return "COMPLETE" # @TODO return something more specific to the client



if not app.config.get('TESTING'):
  # Outside the test suite, run a blocking Gearman worker serving
  # "generate_webm" jobs (import deferred so tests never need gearman).
  from gearman import GearmanWorker
  worker = GearmanWorker(GEARMAN_SERVERS)
  worker.register_task("generate_webm", generate_webm)
  worker.work()
Exemple #36
0
 def __init__(self):
     # Load Gearman connection details and the Wolfram|Alpha key from
     # config.json, then register self.run as the task handler.
     config = json.load(open('config.json','r'))
     self.gm_worker = GearmanWorker([  config['gearmanip']+':'+str(config['gearmanport']) ])
     self.gm_worker.register_task(str(config["gearmanworker_apiai"]),self.run)
     self.wolframkey = config["wolfram_key"]
Exemple #37
0
def create_worker():
    """Build a GearmanWorker connected to settings.GEARMAN_SERVERS with
    every discovered task registered.

    Returns the configured (not yet running) worker.
    """
    discover_workers()
    worker = GearmanWorker(settings.GEARMAN_SERVERS)
    # `task_id` instead of `id`, which shadowed the builtin id().
    for task_id, func in workers.iteritems():
        worker.register_function(task_id, func)
    return worker
Exemple #38
0
def preprocess_gm(job):
    try:
        job.status(0, 0)
        myjob = GmJob(job)  # Gearman job
        myjob.run()
        result = myjob.finish()
    except Exception, e:
        print e
    print 'worker finished job:%s' % job.handle
    print '-' * 80
    return result


def preprocess_sa():
    """Run a standalone (non-Gearman) job, dump its result to tmp.txt
    and return it."""
    myjob = SaJob()  # standalone job
    myjob.run()
    result = myjob.finish()
    # BUGFIX: context manager so the file is flushed and closed even if
    # write() fails (the old handle was never closed).
    with open('tmp.txt', 'w') as wf:
        wf.write(result)
    return result


if __name__ == '__main__':
    # With any CLI argument run the job locally; otherwise serve "crawl"
    # jobs from the Gearman server as a blocking worker.
    if len(sys.argv) > 1:
        preprocess_sa()
    else:
        worker = GearmanWorker(['10.61.0.145'])
        print "worker started."
        worker.register_function('crawl', preprocess_gm)
        worker.work()
Exemple #39
0
#!/usr/bin/python
# Script header: Gearman worker plus a local Redis connection used by
# the helper functions further below.
from gearman import GearmanWorker
# TODO: Initialize Logging

# TODO: Gearman configuration file to be used
gm_worker = GearmanWorker(['localhost:4730'])

#Data Structure to go for storing/retrieving top N results 
import heapq

import hashlib

# TODO: Use Redis Configuration files
import redis
redis_server = redis.Redis('localhost')



########################################################################
#  Helper functions
########################################################################

def topncompute(word):
    #NOTE: Heap List on which top N will be computed
    all_topn_compare = []

    #NOTE: Dict on which data for a word from new/old files came in    
    topncompute = {}

    #NOTE: The string to be replaced can be customized based on the pattern that you want
    hash_object = hashlib.md5(word.replace('tbp_', ''))
# coding = utf-8
import os
import gearman
import math
from gearman import GearmanWorker
from lbs_class import user_lbs
import json

def task_callback(gearman_worker, job):
    """Run a user_lbs analysis for one Gearman job.

    The payload is JSON carrying store, dist, a lng/lat center and a
    time window.  Always acknowledges with 'successful received'.
    """
    print(job.data)
    payload = json.loads(job.data)
    center_point = [payload['center']['lng'], payload['center']['lat']]
    analysis = user_lbs(payload['store'],
                        payload['dist'],
                        center_point,
                        payload['time_start'],
                        payload['time_end'])
    return 'successful received'

# Entry point: serve "user_lbs" jobs until the process is killed.
new_worker = GearmanWorker(['192.168.5.41:4730'])
new_worker.register_task("user_lbs", task_callback)
new_worker.work()
Exemple #41
0
__author__ = 'fanbin'

from strategy import CurrentStrategy
from portfolio import MarketOnClosePortfolio
from optimize import NaiveOptimizer
from constraint import Constraint
import bindata
import json
from gearman import GearmanWorker

# Worker shared by the backtest task handler defined below.
gm_worker = GearmanWorker(['127.0.0.1:4730'])

def task_backtest(gearman_worker, gearman_job):
    symbol = ['000001', '603993']
    bars = bindata.BackTestData(bindata.raw)
    # Apply our current strategy on the chosen stock pool
    rfs = CurrentStrategy(symbol, bars)
    # specify constraints, here is the default one
    cons = Constraint()
    # specify a naive optimizer
    opt = NaiveOptimizer(cons)

    data = json.loads(gearman_job.data)
    function_list = {}
    signal_generator = compile(data["code"], '', 'exec')
    exec signal_generator in function_list

    # Create a portfolio
    portfolio = MarketOnClosePortfolio(symbol, bars, rfs, \
                opt, initial_capital=1000000.0)
    portfolio.strategy.sig_generator = function_list["generate_signals"]
Exemple #42
0
    jenkins_data = simplejson.loads(job.data)
    try:
        myj = Jenkins(jenkins_data['url'])
        job = myj.get_job(jenkins_data['job_id'])
        #job.invoke(securitytoken=token, block=block)
        job.invoke(invoke_pre_check_delay=0)
    except:
        rev = "Not Happy!!!"

    return rev


# Establish a connection with the job server on localhost--like the client,
# multiple job servers can be used.
worker = GearmanWorker(['localhost'])

# Register every task name this worker advertises; several names map to
# the same listener (e.g. 'echo' and 'bravo').
worker.set_client_id('your_worker_client_id_name')
worker.register_task('echo', task_listener_echo)
worker.register_task('build:pep8', task_listener_build)
worker.register_task('stop:jenkins_master.hp.com', task_listener_stop)
worker.register_task('bravo', task_listener_echo)
worker.register_task('reverse', task_listener_reverse)
worker.register_task('jenkins_invoke_job', task_listener_jenkins_invoke_job)

# Once setup is complete, begin working by consuming any tasks available
# from the job server
print 'working...'
worker.work()
Exemple #43
0
        activity_info_config.update({'_id': elem['_id']},
                            {'$set': {'is_running': False,
                                      '__MODIFY_TIME__': datetime.datetime.now(utc)}})

    return 'successful received kanjia'

def task_city_tag(GermanWorker, job):
    # Gearman handler: run a CityTag aggregation for every city_tag_config
    # entry that is neither removed nor already running.
    # NOTE(review): is_running is set True here but never reset to False in
    # this function -- confirm get_city_info() (or another job) clears it,
    # otherwise entries are processed at most once.
    print ('city_tag work received')
    print job.data
    activity_info_config = mapreduce['city_tag_config']
    print (activity_info_config.find_one())
    for elem in activity_info_config.find({"__REMOVED__": False,
                                           "is_running": False}):
        # Flag the entry as running (it is filtered out by the query above).
        activity_info_config.update({'_id': elem['_id']},
                                    {'$set': {'is_running': True,
                                              '__MODIFY_TIME__': datetime.datetime.now(utc)}})

        begin = elem['begin']
        end = elem['end']
        detail = elem['collection_name']
        one = CityTag(begin_name=begin,end_name=end,collection_name=detail)
        result = one.get_city_info()
        print result
    return 'successful finished city_tag job'


# Entry point: one worker serving the three mapreduce tasks above.
new_worker = GearmanWorker(['192.168.5.41:4730'])
new_worker.register_task("task_city", task_city_tag)
new_worker.register_task("task_kanjia", task_kanjia)
new_worker.register_task("task_gender", task_gender)
new_worker.work()
Exemple #44
0
def create_worker():
    """Build a GearmanWorker connected to settings.GEARMAN_SERVERS with
    every discovered task registered.

    Returns the configured (not yet running) worker.
    """
    discover_workers()
    worker = GearmanWorker(settings.GEARMAN_SERVERS)
    # `task_id` instead of `id`, which shadowed the builtin id().
    for task_id, func in workers.iteritems():
        worker.register_function(task_id, func)
    return worker
Exemple #45
0
    def close_all(self):
        """Close the current window, end the browser process and log it."""
        browser = self.browser
        browser.close()
        browser.quit()
        for line in (u'Browser process was ended', u''):
            self.log(line)

    def wait_for(self, by, el):
        """Block up to 10 seconds until the element located by (by, el)
        is present, then return it."""
        locator = (by, el)
        waiter = WebDriverWait(self.browser, 10)
        return waiter.until(EC.presence_of_element_located(locator))

    def log(self, text):
        """Print *text* with a timestamp prefix, but only in debug mode."""
        if not self.debug:
            return
        stamp = datetime.datetime.now().__format__("%d-%m-%Y %H:%M:%S")
        print("[{}] {}".format(stamp, text))


def parse_friends(worker, job):
    """Gearman handler: log in, collect friends for the requested users
    and return the result as a JSON string."""
    params = json.loads(job.data)
    session = Browser(debug, head)
    session.auth(params['auth'])
    friends = session.get_users_friends(params['users'])
    session.close_all()
    return json.dumps(friends)


# Entry point: serve 'parseFriends' jobs on the host given on the CLI.
worker = GearmanWorker([args.gearman_host])
worker.register_task('parseFriends', parse_friends)
worker.work()
    def __init__(self):
        # Connect to the production Gearman server and advertise the
        # 'imgrecon' task, handled by self.ImageRecognition.
        self.worker = GearmanWorker(['gearman.emag-bot.com'])
        self.worker.register_task('imgrecon', self.ImageRecognition)

        self.db = DBInterface()
#!/usr/bin/python
# Script header: Gearman worker plus a local Redis connection used by
# the indexing helpers further below.
from gearman import GearmanWorker
# TODO: Initialize Logging
# TODO:  Direct log to file

# TODO: Gearman configuration file to be used
gm_worker = GearmanWorker(['localhost:4730'])

# TODO: Use Redis Configuration files
import redis

redis_server = redis.Redis('localhost')

#Other Libraries
import hashlib
import email
import re
import os

################################################
# Helper Functions
################################################
# Tokens dropped during word processing; '' also discards empty splits.
# TODO: can add more stop words
_STOP_WORDS = ['a', 'an', 'the', '']

# TODO: Can modify process_words to just process for inclusion list
# TODO: Stemming can be done
# TODO: Parse EMAIL content


# Taken from: https://bitquabit.com/post/having-fun-python-and-elasticsearch-part-2/
class GearmanIn(Actor):
    '''**Consumes events/jobs from Gearmand.**

    Consumes jobs from a Gearmand server.
    When secret is None, no decryption is done.


    Parameters:

        - hostlist(list)(["localhost:4730"])
           |  A list of gearmand servers.  Each entry should have
           |  format host:port.

        - secret(str)(None)
           |  The AES encryption key to decrypt Mod_gearman messages.

        - workers(int)(1)
           |  The number of gearman workers within 1 process.

        - queue(str)(wishbone)
           |  The queue to consume jobs from.

        - enable_keepalive(bool)(False)
           |  Attempt to monkey patch the gearmand module to enable socket
           |  keepalive.


    Queues:

        - outbox:   Outgoing events.

    '''
    # NOTE(review): the mutable default hostlist=[...] is shared across
    # calls; harmless here since it is only read, never mutated.
    def __init__(self,
                 actor_config,
                 hostlist=["localhost:4730"],
                 secret=None,
                 workers=1,
                 queue="wishbone",
                 enable_keepalive=False):
        Actor.__init__(self, actor_config)

        self.pool.createQueue("outbox")
        self.background_instances = []

        # Choose the payload-decrypt strategy once, up front: passthrough
        # when no secret is configured, AES otherwise.
        if self.kwargs.secret is None:
            self.decrypt = self.__plainTextJob
        else:
            # Truncate the secret to 32 bytes and zero-pad it to a
            # fixed-size AES key.
            key = self.kwargs.secret[0:32]
            self.cipher = AES.new(key + chr(0) * (32 - len(key)))
            self.decrypt = self.__encryptedJob

    def preHook(self):
        # Decide between the keepalive-patched and plain worker loops,
        # then launch `workers` loops plus one connection monitor in the
        # background.

        if self.kwargs.enable_keepalive:
            self.logging.info("Requested to monkey patch Gearmand")
            # The socket patch is only known to work against gearman 2.0.2.
            if gearman_version == "2.0.2":
                self.logging.info(
                    "Detected gearman version 2.0.2, patching sockets with SO_KEEPALIVE enabled."
                )
                self.gearmanWorker = self._gearmanWorkerPatched
            else:
                self.logging.warning(
                    "Did not detect gearman version 2.0.2. Not patching , patching sockets with keepalive enabled."
                )
                self.gearmanWorker = self._gearmanWorkerNotPatched
        else:
            self.gearmanWorker = self._gearmanWorkerNotPatched

        for _ in range(self.kwargs.workers):
            self.sendToBackground(self.gearmanWorker)

        self.sendToBackground(self.monitor)

    def consume(self, gearman_worker, gearman_job):
        # Gearman callback: decrypt the payload, emit it as an event on
        # outbox, and echo the raw job data back as the job result.

        decrypted = self.decrypt(gearman_job.data)
        event = Event(decrypted)
        self.submit(event, self.pool.queue.outbox)
        return gearman_job.data

    def __encryptedJob(self, data):
        # Mod_gearman payloads are base64-wrapped AES ciphertext.
        return self.cipher.decrypt(base64.b64decode(data))

    def __plainTextJob(self, data):
        # No secret configured: pass the payload through untouched.
        return data

    def _gearmanWorkerPatched(self):
        # Worker loop with GearmanConnection's socket factory patched so
        # sockets are created with SO_KEEPALIVE; reconnects forever.

        self.logging.info("Gearmand worker instance started")
        while self.loop():
            try:
                with mock.patch.object(GearmanConnection,
                                       '_create_client_socket',
                                       create_client_socket):
                    self.worker_instance = GearmanWorker(self.kwargs.hostlist)
                    self.worker_instance.register_task(self.kwargs.queue,
                                                       self.consume)
                    self.worker_instance.work()
            except Exception as err:
                self.logging.warn(
                    "Connection to gearmand failed. Reason: '%s'. Retry in 1 second."
                    % err)
                sleep(1)
            finally:
                # NOTE(review): if GearmanWorker() itself raised on the very
                # first iteration, worker_instance is unset here -- confirm
                # shutdown() cannot be reached in that state.
                self.worker_instance.shutdown()

    def _gearmanWorkerNotPatched(self):
        # Plain worker loop: connect, serve self.kwargs.queue, and retry
        # every second on failure for as long as the actor loops.

        self.logging.info("Gearmand worker instance started")
        while self.loop():
            try:
                self.worker_instance = GearmanWorker(self.kwargs.hostlist)
                self.worker_instance.register_task(self.kwargs.queue,
                                                   self.consume)
                self.worker_instance.work()
            except Exception as err:
                self.logging.warn(
                    "Connection to gearmand failed. Reason: '%s'. Retry in 1 second."
                    % err)
                sleep(1)
            finally:
                self.worker_instance.shutdown()

    def monitor(self):
        # Background loop: every 5 seconds, try to revive any dead
        # connection of the (shared) worker instance.
        # NOTE(review): reads self.worker_instance, which the worker loops
        # assign -- confirm a worker loop always starts before this runs.

        self.logging.info("Connection monitor started.")
        while self.loop():
            sleep(5)
            for conn in self.worker_instance.connection_list:
                if not conn.connected:
                    self.logging.error(
                        "Connection to '%s' is dead.  Trying to reconnect." %
                        (conn.gearman_host))
                    try:
                        conn.connect()
                        self.logging.info("Connection to '%s' is restored." %
                                          (conn.gearman_host))
                    except Exception as err:
                        self.logging.error(
                            "Failed to reconnect to '%s'. Retry in 5 seconds. Reason: '%s'"
                            % (conn.gearman_host, err))
                else:
                    self.logging.debug("Connection to '%s' is alive." %
                                       (conn.gearman_host))
Exemple #49
0
class WorkerPid(object):
    worker = GearmanWorker(GEARMAN_SERVER)

    @classmethod
    def send_email(cls, worker, job):
        '''
        send email to every publisher about AD
        '''
        data = json.loads(job.data)
        email_server = data.get('email_server')
        email_server_port = data.get('email_server_port')
        username = data.get('username')
        password = data.get('password')
        sender = data.get('sender')
        receiver = data.get('receiver')
        content = data.get('msg')
        email_id = data.get('email_id')

        msg = MIMEMultipart('alternative')
        msg['Subject'] = content.get('subject')
        msg['From'] = sender
        msg['To'] = receiver

        if content.get('text'):
            text = MIMEText(content.get('text'), 'plain', 'utf-8')
            msg.attach(text)
        if content.get('html'):
            html = MIMEText(content.get('html'), 'html', 'utf-8')
            msg.attach(html)
        if content.get('attachment'):
            mime = MIMEBase('application',
                            'octet-stream',
                            filename=content.get('attachment_name'))
            mime.add_header('Content-Disposition',
                            'attachment',
                            filename=content.get('attachment_name'))
            mime.add_header('Content-ID', '<0>')
            mime.add_header('X-Attachment-Id', '0')
            mime.set_payload(content.get('attachment').decode('base64'))
            encoders.encode_base64(mime)
            msg.attach(mime)

        try:
            # Create the body of the message (a plain-text and an HTML version).
            smtp = smtplib.SMTP()
            smtp.connect(email_server, int(email_server_port))
            smtp.ehlo()
            smtp.starttls()
            smtp.ehlo()
            smtp.login(username, password)
            smtp.sendmail(sender, receiver, msg.as_string())
            smtp.quit()
        except smtplib.SMTPAuthenticationError as e:
            EMail._fail_email(email_id,
                              receiver,
                              reason=u'send email failure, error={}'.format(e))
        except smtplib.SMTPRecipientsRefused as e:
            EMail._fail_email(email_id,
                              reason=u'receiver refused, error={}'.format(e))
        except smtplib.SMTPSenderRefused as e:
            EMail._fail_email(email_id,
                              reason=u'sender refused, error={}'.format(e))
        except Exception as e:
            EMail._fail_email(email_id,
                              reason=u'unknown reason, error={}'.format(e))
        finally:
            return data.receiver

    @classmethod
    def main(cls):
        cls.worker.register_task('email', cls.send_email)
        print 'Working...'
        cls.worker.work()
Exemple #50
0
#import pdb; pdb.set_trace()
from gearman import GearmanWorker


def message_recieved(gearman_worker, gearman_job):
    """Trivial acknowledgement handler; the job payload is ignored.

    (Name and reply keep the original 'Recieved' spelling because
    clients may match on them.)
    """
    reply = "Message Recieved"
    return reply


# Entry point: serve 'letsdosomething' jobs with the trivial handler above.
worker = GearmanWorker(["localhost:4730"])
worker.register_task('letsdosomething', message_recieved)
#print(dir(worker))
worker.work()
#    jenkins_data = simplejson.loads(job.data)
#    try:
#        myj = Jenkins(jenkins_data['url'])
#        job = myj.get_job(jenkins_data['job_id'])
#        #job.invoke(securitytoken=token, block=block)
#        job.invoke(invoke_pre_check_delay=0)
#    except:
#        rev="Not Happy!!!" 
#    
#    return rev

	
# Establish a connection with the job server--like the client, multiple
# job servers can be used (the localhost line below is kept disabled).
#worker = GearmanWorker(['localhost:4730'])
worker = GearmanWorker(['15.185.117.66:4730'])

# Register every task name this worker advertises; several names map to
# the same listener (e.g. 'echo' and 'bravo').
worker.set_client_id('your_worker_client_id_name')
worker.register_task('echo', task_listener_echo)
worker.register_task('build:pep8', task_listener_build)
#worker.register_task('stop:jenkins_master.hp.com', task_listener_stop)
worker.register_task('bravo', task_listener_echo)
worker.register_task('reverse', task_listener_reverse)
#worker.register_task('jenkins_invoke_job', task_listener_jenkins_invoke_job)

# Once setup is complete, begin working by consuming any tasks available
# from the job server
print 'working...'
worker.work()
Exemple #52
0
from pyModbusTCP.client import ModbusClient
from gearman import GearmanWorker
import json

def reader(worker, job):
    """Gearman handler: read energy/power from a local Modbus device.

    Returns a JSON string {"energy": ..., "power": ...} on success, or
    None when the device is unreachable or the read fails.
    """
    c = ModbusClient(host="localhost", port=502)

    if not c.is_open() and not c.open():
        print("unable to connect to host")

    if c.is_open():

        holdingRegisters = c.read_holding_registers(1, 4)

        # BUGFIX: read_holding_registers returns None on a failed read;
        # the old code then crashed with TypeError when indexing it.
        if holdingRegisters is None:
            print("unable to read holding registers")
            return None

        # Imagine we've "energy" value in position 1 with two words
        energy = (holdingRegisters[0] << 16) | holdingRegisters[1]

        # Imagine we've "power" value in position 3 with two words
        power = (holdingRegisters[2] << 16) | holdingRegisters[3]

        out = {"energy": energy, "power": power}
        return json.dumps(out)
    return None

# Entry point: serve 'modbusReader' jobs with the reader handler above.
worker = GearmanWorker(['127.0.0.1'])

worker.register_task('modbusReader', reader)

print 'working...'
worker.work()
      time.sleep(10)
      print('waiting 10s... ')
      status = i.update()
   if status == 'running':
      print('running adding tag... ')
      import hashlib
      conn.create_tags([i.id], {"name": "ScrambleDB" +random_md5like_hash()})
      # i.add_tag("Name","{{ScambleDB}}")
      
   else:
      print('Instance status: ' + status)
    
   #     security_groups=[ config["cloud"]["security_groups"]])
   
   return json.dumps(reservation)    

# Establish a connection with the job server on localhost--like the client,
# multiple job servers can be used.
worker = GearmanWorker(['127.0.0.1:4731'])

# register_task tells the job server that this worker handles the
# "cloud_cmd" task.
worker.register_task('cloud_cmd', cloud_cmd)

# Once setup is complete, begin working by consuming any tasks available
# from the job server
print 'working...'
worker.work()


Exemple #54
0
__author__ = 'fanbin'

from strategy import CurrentStrategy
from portfolio import MarketOnClosePortfolio
from optimize import NaiveOptimizer
from constraint import Constraint
import bindata
import json
from gearman import GearmanWorker

# Worker shared by the backtest task handler defined below.
gm_worker = GearmanWorker(['127.0.0.1:4730'])


def task_backtest(gearman_worker, gearman_job):
    symbol = ['000001', '603993']
    bars = bindata.BackTestData(bindata.raw)
    # Apply our current strategy on the chosen stock pool
    rfs = CurrentStrategy(symbol, bars)
    # specify constraints, here is the default one
    cons = Constraint()
    # specify a naive optimizer
    opt = NaiveOptimizer(cons)

    data = json.loads(gearman_job.data)
    function_list = {}
    signal_generator = compile(data["code"], '', 'exec')
    exec signal_generator in function_list

    # Create a portfolio
    portfolio = MarketOnClosePortfolio(symbol, bars, rfs, \
                opt, initial_capital=1000000.0)