def _init(self):
    """Create and return a connected GearmanWorker, retrying until gearman
    becomes reachable.

    Sleeps (via gevent) for settings.GEARMAN_RECONNECT_TIMEOUT seconds both
    while is_available() reports the server down and after a failed connect.
    """
    while True:
        # Wait until the server reports itself available before connecting.
        while not self.is_available():
            logging.error(
                "Gearman not available right now. Demon will sleep during {n} seconds"
                .format(n=settings.GEARMAN_RECONNECT_TIMEOUT))
            gevent.sleep(settings.GEARMAN_RECONNECT_TIMEOUT)
        logging.debug("Gearman worker try to connect {hosts}".format(
            hosts=', '.join(settings.GEARMAN['hosts'])))
        try:
            gm_worker = GearmanWorker(settings.GEARMAN['hosts'])
            gm_worker.set_client_id("socket_io_gearman_" + str(OPTIONS.port))
            gm_worker.register_task("socket_io", GearmanListener.callback)
            logging.debug("Gearman worker was successfull created")
            return gm_worker
        except Exception as e:
            # BUG FIX: was "except Exception, e" (removed in Python 3) and used
            # the deprecated e.message attribute; str(e) is the portable form.
            logging.error(
                "Error while initiation gearman worker connect with message: {message}"
                .format(message=str(e)))
            logging.debug("Demon will be sleep during {n} seconds".format(
                n=settings.GEARMAN_RECONNECT_TIMEOUT))
            gevent.sleep(settings.GEARMAN_RECONNECT_TIMEOUT)
def main():
    """Start a gearman worker serving every spider task."""
    # Task name -> handler pairs, registered in the original order.
    handlers = [
        ("weibo_spider", weibo),
        ("bbs_spider", bbs),
        ("news_spider", news),
        ("blog_spider", blog),
        ("media_spider", media),
        ("class_spider", same_class),
    ]
    gm_worker = GearmanWorker(['127.0.0.1:4730'])
    for task_name, handler in handlers:
        gm_worker.register_task(task_name, handler)
    gm_worker.work()
def gearmanWorker(self):
    """Consume jobs from gearmand, reconnecting after any failure.

    Keeps running while self.loop() is truthy; on any exception the error
    is logged and a fresh connection is attempted after a one second pause.
    """
    self.logging.info("Gearmand worker instance started")
    while self.loop():
        try:
            gm_worker = GearmanWorker(self.hostlist)
            gm_worker.register_task(self.queue, self.consume)
            gm_worker.work()
        except Exception as err:
            self.logging.warn(
                'Connection to gearmand failed. Reason: %s. Retry in 1 second.' % err)
            sleep(1)
def _gearmanWorkerNotPatched(self):
    """Consume jobs from gearmand, reconnecting after any failure.

    Unlike the patched variant, the worker is explicitly shut down in a
    finally block at the end of every iteration.
    """
    self.logging.info("Gearmand worker instance started")
    while self.loop():
        # BUG FIX: if GearmanWorker() itself raised on the first iteration,
        # self.worker_instance was never assigned and the finally clause
        # crashed with AttributeError. Initialise it and guard the shutdown.
        self.worker_instance = None
        try:
            self.worker_instance = GearmanWorker(self.kwargs.hostlist)
            self.worker_instance.register_task(self.kwargs.queue, self.consume)
            self.worker_instance.work()
        except Exception as err:
            self.logging.warn(
                "Connection to gearmand failed. Reason: '%s'. Retry in 1 second." % err)
            sleep(1)
        finally:
            if self.worker_instance is not None:
                self.worker_instance.shutdown()
def setUp(self):
    """Start a gearman server, a background worker thread and a client.

    The worker registers two plain functions, one function with a timeout,
    and two class-based workers, then runs in a background thread with the
    Hooks lifecycle callbacks installed.
    """
    self.start_server()
    # (func_name, exception) of the last failed job, recorded by Hooks.fail.
    self.last_exception = (None, None)
    self.worker = GearmanWorker(job_servers)
    self.worker.register_function("echo", echo)
    self.worker.register_function("fail", fail)
    # "sleep" gets a 1 second timeout so timeout handling can be exercised.
    self.worker.register_function("sleep", sleep, timeout=1)
    self.worker.register_class(ObjectWorker())
    self.worker.register_class(ClassWorker())

    class Hooks(object):
        # Job lifecycle callbacks; only failures are recorded.
        @staticmethod
        def start(job):
            pass

        @staticmethod
        def complete(job, res):
            pass

        @staticmethod
        def fail(job, exc):
            # Closes over the test instance so asserts can see the failure.
            self.last_exception = (job.func, exc)

    import thread
    self.worker_thread = thread.start_new_thread(
        self.worker.work, tuple(), dict(hooks=Hooks))  # TODO: Shouldn't use threads.. but we do for now (also, the thread is never terminated)
    self.client = GearmanClient(job_servers)
    # NOTE(review): the lines below continue a task-listener function whose
    # "def" header is above this chunk; indentation reconstructed.
    jenkins_data = simplejson.loads(job.data)
    try:
        myj = Jenkins(jenkins_data['url'])
        job = myj.get_job(jenkins_data['job_id'])
        #job.invoke(securitytoken=token, block=block)
        job.invoke(invoke_pre_check_delay=0)
    except:
        # NOTE(review): bare except hides all errors, and "rev" is only
        # assigned on failure -- the success path presumably relies on an
        # earlier assignment outside this view; verify.
        rev = "Not Happy!!!"
    return rev


# Establish a connection with the job server on localhost--like the client,
# multiple job servers can be used.
worker = GearmanWorker(['localhost'])
# register_task will tell the job server that this worker handles the "echo"
# task
worker.set_client_id('your_worker_client_id_name')
worker.register_task('echo', task_listener_echo)
worker.register_task('build:pep8', task_listener_build)
worker.register_task('stop:jenkins_master.hp.com', task_listener_stop)
worker.register_task('bravo', task_listener_echo)
worker.register_task('reverse', task_listener_reverse)
worker.register_task('jenkins_invoke_job', task_listener_jenkins_invoke_job)
# Once setup is complete, begin working by consuming any tasks available
# from the job server
print 'working...'
worker.work()
#!/usr/bin/python from gearman import GearmanWorker # TODO: Initialize Logging # TODO: Direct log to file # TODO: Gearman configuration file to be used gm_worker = GearmanWorker(['localhost:4730']) # TODO: Use Redis Configuration files import redis redis_server = redis.Redis('localhost') #Other Libraries import hashlib import email import re import os ################################################ # Helper Functions ################################################ # TODO: can add more stop words _STOP_WORDS = ['a', 'an', 'the', ''] # TODO: Can modify process_words to just process for inclusion list # TODO: Stemming can be done # TODO: Parse EMAIL content # Taken from: https://bitquabit.com/post/having-fun-python-and-elasticsearch-part-2/
    # NOTE(review): these three methods belong to a browser-wrapper class
    # whose header is above this chunk; indentation reconstructed.
    def close_all(self):
        """Close and quit the underlying Selenium browser."""
        self.browser.close()
        self.browser.quit()
        self.log(u'Browser process was ended')
        self.log(u'')

    def wait_for(self, by, el):
        """Wait up to 10 seconds for an element to be present; return it."""
        element = WebDriverWait(self.browser, 10).until(
            EC.presence_of_element_located((by, el)))
        return element

    def log(self, text):
        """Print a timestamped message when debug mode is enabled."""
        if self.debug:
            log_date = datetime.datetime.now()
            formatted_date = log_date.__format__("%d-%m-%Y %H:%M:%S")
            print("[{}] {}".format(formatted_date, text))


def parse_friends(worker, job):
    """Gearman handler: log in, collect friends of the given users, and
    return the result encoded as JSON."""
    job_arr = json.loads(job.data)
    br = Browser(debug, head)
    br.auth(job_arr['auth'])
    job_result = br.get_users_friends(job_arr['users'])
    br.close_all()
    return json.dumps(job_result)


worker = GearmanWorker([args.gearman_host])
worker.register_task('parseFriends', parse_friends)
worker.work()
def create_worker():
    """Build a GearmanWorker with every discovered task registered.

    Returns the configured worker; the caller is responsible for
    invoking work() on it.
    """
    discover_workers()
    gm_worker = GearmanWorker(settings.GEARMAN_SERVERS)
    for task_id, task_func in workers.iteritems():
        gm_worker.register_function(task_id, task_func)
    return gm_worker
    # NOTE(review): continues a function whose header is above this chunk.
    return False


def task_listener(gearman_worker, gearman_job):
    """Dispatch a pickled (task_name, video_id, segment_id) job and return
    the pickled result.

    NOTE(review): pickle.loads on job data executes arbitrary code if
    untrusted clients can reach the queue -- verify the trust boundary.
    """
    task_name, video_id, segment_id = pickle.loads(gearman_job.data)
    result = False
    if task_name == 'transcode':
        result = transcode_segment(video_id, segment_id)
    elif task_name == 'thumbnail':
        result = generate_thumbnail(video_id, segment_id)
    return pickle.dumps(result)


if __name__ == "__main__":
    # worker run
    logger.info("Setting up the worker.")
    gm_worker = GearmanWorker([GEARMAND_HOST_PORT])
    gm_worker.register_task(SEGMENT_TASK_NAME, task_listener)
    try:
        logger.info("Worker was set up successfully. Waiting for work.")
        gm_worker.work()
    except KeyboardInterrupt:
        # Ctrl-C: drain and close connections before exiting.
        gm_worker.shutdown()
        logger.info("Worker has shut down successfully. Bye.")
def handle(self, *args, **options): print "worker started" worker = GearmanWorker(["127.0.0.1"]) worker.register_function("download", download) worker.work()
class WorkerPid(object): worker = GearmanWorker(GEARMAN_SERVER) @classmethod def send_email(cls, worker, job): ''' send email to every publisher about AD ''' data = json.loads(job.data) email_server = data.get('email_server') email_server_port = data.get('email_server_port') username = data.get('username') password = data.get('password') sender = data.get('sender') receiver = data.get('receiver') content = data.get('msg') email_id = data.get('email_id') msg = MIMEMultipart('alternative') msg['Subject'] = content.get('subject') msg['From'] = sender msg['To'] = receiver if content.get('text'): text = MIMEText(content.get('text'), 'plain', 'utf-8') msg.attach(text) if content.get('html'): html = MIMEText(content.get('html'), 'html', 'utf-8') msg.attach(html) if content.get('attachment'): mime = MIMEBase('application', 'octet-stream', filename=content.get('attachment_name')) mime.add_header('Content-Disposition', 'attachment', filename=content.get('attachment_name')) mime.add_header('Content-ID', '<0>') mime.add_header('X-Attachment-Id', '0') mime.set_payload(content.get('attachment').decode('base64')) encoders.encode_base64(mime) msg.attach(mime) try: # Create the body of the message (a plain-text and an HTML version). 
smtp = smtplib.SMTP() smtp.connect(email_server, int(email_server_port)) smtp.ehlo() smtp.starttls() smtp.ehlo() smtp.login(username, password) smtp.sendmail(sender, receiver, msg.as_string()) smtp.quit() except smtplib.SMTPAuthenticationError as e: EMail._fail_email(email_id, receiver, reason=u'send email failure, error={}'.format(e)) except smtplib.SMTPRecipientsRefused as e: EMail._fail_email(email_id, reason=u'receiver refused, error={}'.format(e)) except smtplib.SMTPSenderRefused as e: EMail._fail_email(email_id, reason=u'sender refused, error={}'.format(e)) except Exception as e: EMail._fail_email(email_id, reason=u'unknown reason, error={}'.format(e)) finally: return data.receiver @classmethod def main(cls): cls.worker.register_task('email', cls.send_email) print 'Working...' cls.worker.work()
#import pdb; pdb.set_trace()
from gearman import GearmanWorker


def message_recieved(gearman_worker, gearman_job):
    """Handler for the 'letsdosomething' task: reply with a fixed string."""
    return "Message Recieved"


gm_worker = GearmanWorker(["localhost:4730"])
gm_worker.register_task('letsdosomething', message_recieved)
#print(dir(gm_worker))
gm_worker.work()
def preprocess_gm(job): try: job.status(0, 0) myjob = GmJob(job) # Gearman job myjob.run() result = myjob.finish() except Exception, e: print e print 'worker finished job:%s' % job.handle print '-' * 80 return result def preprocess_sa(): myjob = SaJob() # 单机job myjob.run() result = myjob.finish() wf = open('tmp.txt', 'w') wf.write(result) return result if __name__ == '__main__': if len(sys.argv) > 1: preprocess_sa() else: worker = GearmanWorker(['10.61.0.145']) print "worker started." worker.register_function('crawl', preprocess_gm) worker.work()
def __init__(self):
    """Read config.json and register the api.ai task handler with gearman."""
    # BUG FIX: json.load(open(...)) leaked the file handle; use a context
    # manager so the file is closed deterministically.
    with open('config.json', 'r') as config_file:
        config = json.load(config_file)
    self.gm_worker = GearmanWorker(
        [config['gearmanip'] + ':' + str(config['gearmanport'])])
    self.gm_worker.register_task(str(config["gearmanworker_apiai"]), self.run)
    # Wolfram Alpha API key used by the handler.
    self.wolframkey = config["wolfram_key"]
    # NOTE(review): continues a transcoding task function whose header is
    # above this chunk; indentation reconstructed.
    if file_id is not None:
        newfile = MEDIA_DIRECTORY + file_id
    else:
        newfile = MEDIA_DIRECTORY + gearman_job.data
    print "TASK RECEIVED FOR %s" % newfile  # @TODO timestamp
    # CONVERT TO WEBM
    # NOTE(review): newfile is interpolated into a shell command run via
    # os.system; if job data is untrusted this is a shell-injection risk.
    cmd = "avconv -threads auto -i %s.mp4 -c:v libvpx -crf 10 \
    -b:v 768K -c:a libvorbis -deadline realtime \
    -cpu-used -10 %s.webm" % (newfile, newfile)
    cmd = cmd.encode('utf-8')
    result = os.system(cmd)
    if result != 0:
        print "TASK FAILURE"  # @TODO timestamp
        return "ERROR"  # @TODO return something more specific to the client
    os.chmod(newfile + ".webm", 0775)
    print "TASK COMPLETE"  # @TODO timestamp
    return "COMPLETE"  # @TODO return something more specific to the client


if not app.config.get('TESTING'):
    from gearman import GearmanWorker
    worker = GearmanWorker(GEARMAN_SERVERS)
    worker.register_task("generate_webm", generate_webm)
    worker.work()
    # NOTE(review): continues an instance-provisioning function whose header
    # is above this chunk; indentation reconstructed.
    time.sleep(10)
    print('waiting 10s... ')
    status = i.update()
    if status == 'running':
        print('running adding tag... ')
        import hashlib
        conn.create_tags([i.id], {"name": "ScrambleDB" + random_md5like_hash()})
        # i.add_tag("Name","{{ScambleDB}}")
    else:
        print('Instance status: ' + status)
    # security_groups=[ config["cloud"]["security_groups"]])
    return json.dumps(reservation)


# Establish a connection with the job server on localhost--like the client,
# multiple job servers can be used.
worker = GearmanWorker(['127.0.0.1:4731'])
# register_task will tell the job server that this worker handles the "echo"
# task
worker.register_task('cloud_cmd', cloud_cmd)
# Once setup is complete, begin working by consuming any tasks available
# from the job server
print 'working...'
worker.work()
def __init__(self):
    """Set up the gearman connection, task registration and DB access."""
    gm_worker = GearmanWorker(['gearman.emag-bot.com'])
    gm_worker.register_task('imgrecon', self.ImageRecognition)
    self.worker = gm_worker
    self.db = DBInterface()
def __init__(self):
    """Connect to gearman and register the raw-data task handler."""
    gm_worker = GearmanWorker(['gearman.emag-bot.com'])
    gm_worker.register_task('getrawdata', self.GetRawData)
    self.worker = gm_worker
__author__ = 'fanbin'
from strategy import CurrentStrategy
from portfolio import MarketOnClosePortfolio
from optimize import NaiveOptimizer
from constraint import Constraint
import bindata
import json
from gearman import GearmanWorker

# Module-level worker connected to a local gearmand.
gm_worker = GearmanWorker(['127.0.0.1:4730'])


def task_backtest(gearman_worker, gearman_job):
    """Gearman handler: run a backtest over user-submitted strategy code.

    NOTE(review): this function appears to continue past the end of this
    chunk (no return statement is visible here).
    """
    symbol = ['000001', '603993']
    bars = bindata.BackTestData(bindata.raw)
    # Apply our current strategy on the chosen stock pool
    rfs = CurrentStrategy(symbol, bars)
    # specify constraints, here is the default one
    cons = Constraint()
    # specify a naive optimizer
    opt = NaiveOptimizer(cons)
    data = json.loads(gearman_job.data)
    function_list = {}
    # NOTE(review): compiling and exec-ing code received from the job queue
    # runs arbitrary code; ensure only trusted clients can submit jobs.
    signal_generator = compile(data["code"], '', 'exec')
    exec signal_generator in function_list
    # Create a portfolio
    portfolio = MarketOnClosePortfolio(symbol, bars, rfs, \
        opt, initial_capital=1000000.0)