def deal(self):
    self.write('work received<br>')
    new_client = GearmanClient(['192.168.5.41:4730'])
    current_request = new_client.submit_job('task_kanjia', 'heal the world')
    new_result = current_request.result
    print new_result
    self.write('work finished')
def analysis_address(row):
    out_row = []
    input_id = []
    input_addr = row[1].encode("utf-8")
    api_output_id = []
    api_output_addr = []
    client = GearmanClient([GEARMAN_API])
    job_request = client.submit_job(FUNCTION, input_addr)
    loaded_json = json.loads(job_request.result)
    for addr in loaded_json:
        api_output_id.append(addr['id'])
        api_output_addr.append(addr['string'])
    if (row[6] != ''):
        input_id = row[6].strip().split(" ")
        input_id = list(map(int, input_id))
    compare_list = set(input_id) ^ set(api_output_id)
    different = len(compare_list)
    str_api_output_addr = ', '.join(api_output_addr)
    str_api_output_id = ' '.join(str(e) for e in api_output_id)
    out_row = [row[0], row[1], row[2], str_api_output_addr, row[6], str_api_output_id, different]
    return out_row
def obj_create(self, bundle, **kwargs):
    bundle.obj = Task(procedure_url=bundle.data["ordered_tasks"][0],
                      input_dataset=bundle.data["input_dataset"],
                      output_dataset=bundle.data["output_dataset"])
    bundle.obj.save()
    parent_task = bundle.obj
    for t_url in bundle.data["ordered_tasks"][1:]:
        if "aggregator" in t_url:
            continue
        temp_task = Task(procedure_url=t_url, parent=parent_task,
                         input_dataset=bundle.data["output_dataset"],
                         output_dataset=bundle.data["output_dataset"])
        temp_task.save()
        parent_task = temp_task
    #statsd.gauge('outstanding_tasks', 1, delta=True)
    #statsd.gauge('outstanding_tasks', 1, delta=True)
    # now that we've created the dependency chain, we will schedule the first task
    dat = {}
    dat["task"] = bundle.data["ordered_tasks"][0]
    dat["task_id"] = bundle.obj.id
    dat["output_dataset"] = bundle.data["output_dataset"]
    dat["input_dataset"] = bundle.data["input_dataset"]
    dat["cassandra_nodes"] = CassandraNode.get_nodeip_list()
    client = GearmanClient(GearmanNode.get_nodeip_list())
    client.submit_job("pre_schedule", pickle.dumps(dat), background=True)
    return bundle.obj
def videoCreationBatch():
    from auth import get_user, is_superuser
    if not is_superuser():
        return action_401()
    if request.method == "GET":
        chdir(config.INGEST_DIRECTORY)
        files = [f for f in listdir(getcwd()) if f[0] != '.']
        return json.dumps(files)
    else:
        from PIL import Image
        from shutil import move
        from helpers import getVideoInfo
        packet = request.json
        for up in packet:
            filepath = unicode(config.INGEST_DIRECTORY + up['filepath'])
            new_file = unicode(config.MEDIA_DIRECTORY + up['id'] + ".mp4")
            if path.isfile(new_file):
                return bundle_400("That file already exists; try another unique ID.")
            if path.isfile(filepath.encode('utf-8')):
                md = getVideoInfo(filepath.encode('utf-8'))
                poster = config.POSTERS_DIRECTORY + "%s.jpg" % (up["id"])
                thumb = config.POSTERS_DIRECTORY + "%s_thumb.jpg" % (up["id"])
                move(filepath.encode('utf-8'), new_file.encode('utf-8'))
                assets.update({"_id": up["pid"]}, {"$set": {
                    "@graph.ma:frameRate": float(md["framerate"]),
                    "@graph.ma:averageBitRate": int(float(md["bitrate"])),
                    "@graph.ma:frameWidth": int(md["width"]),
                    "@graph.ma:frameHeight": int(md["height"]),
                    "@graph.ma:duration": int(round(float(md["duration"]))) / 60,
                    "@graph.ma:locator": [
                        {"@id": up["id"],
                         "ma:hasFormat": "video/mp4",
                         "ma:hasCompression": {"@id": "http://www.freebase.com/view/en/h_264_mpeg_4_avc", "name": "avc.42E01E"}},
                        {"@id": up["id"],
                         "ma:hasFormat": "video/webm",
                         "ma:hasCompression": {"@id": "http://www.freebase.com/m/0c02yk5", "name": "vp8.0"}}
                    ]
                }})
                imgcmd = "avconv -i '%s' -q:v 1 -r 1 -t 00:00:01 -ss 00:00:30 -f image2 '%s'" % (new_file, poster)
                system(imgcmd.encode('utf-8'))
                chmod(poster, 0775)
                im = Image.open(poster)
                im.thumbnail((160, 90))
                im.save(thumb)
                chmod(thumb, 0775)
                if not app.config.get('TESTING'):
                    from gearman import GearmanClient
                    client = GearmanClient(config.GEARMAN_SERVERS)
                    client.submit_job("generate_webm", str(up["id"]))
                else:
                    from ingest import generate_webm
                    result = generate_webm(file_id=up['id'])
                    if result == "ERROR":
                        raise Exception("Could not convert media file.")
        return "Success"
def pushSocket(queue):
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    serverAddr = ('0.0.0.0', 28282)
    sock.bind(serverAddr)
    send_thread = SendThread(sock, queue)
    send_thread.start()
    gm_client = GearmanClient(['localhost:4730'])
    while True:
        msg, addr = sock.recvfrom(2048)
        if not msg:
            continue
        #print "received:", msg, "from", addr
        try:
            data = json.loads(msg)
            topic = str(data['topic'])
            data['address'] = addr[0] + ':' + str(addr[1])
        except:
            continue
        gm_request = gm_client.submit_job(topic, json.dumps(data),
                                          background=False, wait_until_complete=False)
    sock.close()
def test_mailchimp():
    'test mailchimp integration'
    """
    For more info about setting config settings see demisauce/manage.py
    """
    from gearman import GearmanClient
    from gearman.task import Task
    gearman_client = GearmanClient(options.gearman_servers)
    # send emails
    list_id, mc_apikey = '0', ''
    site = Site.GET(1)
    assert site.has_attribute('mailchimp_api_key')
    assert site.has_attribute('mailchimp_listid')
    list_id = site.get_attribute('mailchimp_listid').value
    mc_apikey = site.get_attribute('mailchimp_api_key').value
    jsondict = {
        'template_name': 'thank_you_for_registering_with_demisauce',
        'user': {"email": "*****@*****.**"},
        'mailchimp_listid': list_id,
        'mailchimp_api_key': mc_apikey,
        'attributes': [{"name": "BetaUsers", "category": "event"},
                       {"name": "NewSegment3", "category": "event"}]
    }  # 'BetaUsers', "NewSegment", "NewSegment2"
    num_sent = gearman_client.do_task(Task("mailchimp_addtolist", json.dumps(jsondict),
                                           background=False))
    logging.debug("test emailsend num_sent = %s" % (num_sent))
    assert num_sent == '1'
def send(self):
    log.error("in send of email api")
    emailjson = json.loads(self.request.body)
    if emailjson and 'template_name' in emailjson:
        log.error("weehah, body json = %s" % emailjson)
        #TODO: revamp and use self.db.gearman_client
        gearman_client = GearmanClient(options.gearman_servers)
        gearman_client.do_task(Task("email_send", self.request.body, background=True))
def deal(self):
    self.write('work received<br>')
    new_client = GearmanClient(['192.168.5.41:4730'])
    current_request = new_client.submit_job('task_city', 'heal the world',
                                            wait_until_complete=False)
    new_result = current_request.result
    print new_result
    self.write('work finished')
class EmailClient(object):
    '''Gearman client for sending email'''

    def __init__(self, hosts):
        self.client = GearmanClient(hosts)

    def create_message(self, subject, text=None, attachment_path=None, html_path=None):
        '''
        :param subject: message subject
        :param text: plain-text message body
        :param attachment_path: path to an attachment file
        :param html_path: path to an HTML template
        :return: the message dict
        '''
        msg = {'subject': subject, 'text': text}
        if attachment_path:
            with open(attachment_path, 'rb') as fb:
                attachment_file = fb.read()
            msg['attachment_name'] = os.path.basename(attachment_path)
            msg['attachment'] = attachment_file.encode('base64')
        if html_path:
            with open(html_path, 'rb') as fb:
                html_file = fb.read()
            msg['html'] = html_file
        return msg

    def send(self, email_server, email_server_port, sender, receiver, use_name, pass_word, worker, msg, _id):
        task_job_list = []
        for r in receiver:
            content = dict(username=use_name, password=pass_word,
                           email_server=email_server, email_server_port=email_server_port,
                           sender=sender, receiver=r, msg=msg, email_id=_id)
            task_job_list.append(dict(task=worker, data=json.dumps(content)))
        res = self.client.submit_multiple_jobs(task_job_list, background=True,
                                               wait_until_complete=False)
        return res

    def check_email_status(self, res):
        retried_connection_failed_requests = self.client.submit_multiple_requests(
            res, wait_until_complete=True, poll_timeout=1.0)
        for r in retried_connection_failed_requests:
            print r
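A minimal usage sketch for the EmailClient above, assuming a local gearmand on localhost:4730, a worker registered under the hypothetical name 'send_email', and illustrative SMTP host, port, credentials, and addresses; none of these values come from the original snippet.

# All server names, credentials, addresses, and the 'send_email' worker name
# below are illustrative assumptions, not values from the original code.
client = EmailClient(['localhost:4730'])
msg = client.create_message('Weekly report', text='See the attached file.',
                            attachment_path='/tmp/report.csv')
requests = client.send('smtp.example.com', 25, '[email protected]',
                       ['[email protected]'], 'smtp_user', 'smtp_pass',
                       'send_email', msg, 'report-001')
client.check_email_status(requests)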
def send_email(name, user, data):
    jsondict = {
        'template_name': name,
        'emails': [user.email],
        'template_data': data,
        'apikey': options.demisauce_api_key
    }
    data = json.dumps(jsondict)
    url = "%s/api/email/%s/send.json?apikey=%s" % (options.demisauce_url, name, options.demisauce_api_key)
    gearman_client = GearmanClient(options.gearman_servers)
    gearman_client.do_task(Task("email_send", data, background=True))
def submit_job(self, force=False):
    if self.job_handle and force == False:
        logging.debug("That job's already been submitted.")
    else:
        logging.debug("Submitting job to generate %s" % (self.filename))
        client = GearmanClient(settings.GEARMAN_SERVERS)
        jobdata = json.dumps(self.get_run_parameters())
        jobrequest = client.submit_job(self.job_queue, jobdata, background=True)
        self.job_handle = jobrequest.gearman_job.handle
        self.job_complete = False
        self.save()
def run(simulation, run):
    rivers = simulation.rivers.all()
    start_point = simulation.start_point
    end_point = simulation.end_point
    start_elevation = usgs_elevation(start_point.x, start_point.y)
    end_elevation = usgs_elevation(end_point.x, end_point.y)
    river_length = 0
    for river in rivers:
        river_length += river.geom.length
    # This is dumb, we need to do a proper calculation but for now this is good enough
    river_length_ft = river_length * 69.1 * 5280
    number_of_cross_sections = 1
    upstream_width = 30
    downstream_width = 30
    distance_ft = start_point.distance(end_point) * 69.1 * 5280
    channels = [
        {
            'length': distance_ft,
            'cross_sections': number_of_cross_sections,
            'start_elevation': start_elevation,
            'end_elevation': end_elevation,
            'upstream_width': upstream_width,
            'downstream_width': downstream_width
        },
    ]
    model_parameters = {
        'maxtimesteps': 80,
        'time_step': 300,
        'time_weight': 0.6,
        'amplitude': 1999.5,
        'period': 3.33,
        'phase_angle': 1.67,
        'start_time': 0.0,
        'end_time': 1.667
    }
    run_parameters = {
        'channels': channels,
        'model_parameters': model_parameters,
        'simulation_id': simulation.id,
        'run_id': run.id
    }
    client = GearmanClient(settings.GEARMAN_SERVERS)
    jobdata = json.dumps(run_parameters)
    client.submit_job('fourpt', jobdata, background=True)
    return True
def doWork_fetchDependencyInfo(params):
    """
    params = {
        'projectId': projectId,
        'projectPath': projectInfo['projectPath'],
        'appName': data['appName'],
        'dependencyType': 1,
    }
    """
    client = GearmanClient([GearmanConfig.gearmanConnection])
    data = json.dumps(params)
    request = client.submit_job(JobList.Job_fetchDependencyInfo, data, wait_until_complete=True)
    return request.result
def obj_create(self, bundle, **kwargs):
    #raise Exception(bundle.data)
    obj = Task.objects.get(id=bundle.data["id"])
    new_count = 0
    try:
        #obj.tasklet_count += bundle.data["increment"]
        #statsd.gauge('outstanding_tasks', bundle.data["increment"], delta=True)
        new_count = obj.tasklet_count + bundle.data["increment"]
    except KeyError:
        try:
            #obj.tasklet_count -= bundle.data["decrement"]
            #statsd.gauge('outstanding_tasks', -bundle.data["decrement"], delta=True)
            new_count = obj.tasklet_count - bundle.data["decrement"]
        except KeyError:
            pass
    if new_count <= 0 and obj.tasklet_count == 0:
        obj.tasklet_count = 0
        obj.save()
        # we already scheduled the next procedure
        return
    obj.tasklet_count = new_count
    obj.save()
    if obj.tasklet_count == 0:
        obj.done = True
        obj.save()
        #statsd.gauge('outstanding_tasks', -1, delta=True)
        try:
            new_task = obj.task_set.all()[0]
            new_dataset = DatasetMetaResource().get_via_uri(new_task.output_dataset, bundle.request)
            old_dataset = DatasetMetaResource().get_via_uri(new_task.input_dataset, bundle.request)
            new_dataset.highest_ts = old_dataset.highest_ts
            new_dataset.lowest_ts = old_dataset.lowest_ts
            new_dataset.save()
        except IndexError:
            # no more tasks to schedule
            return
        dat = {}
        dat["task"] = new_task.procedure_url
        dat["task_id"] = new_task.id
        dat["output_dataset"] = new_task.output_dataset  # +"?no_points=True"
        dat["input_dataset"] = new_task.input_dataset  # +"?no_points=True"
        dat["cassandra_nodes"] = CassandraNode.get_nodeip_list()
        client = GearmanClient(GearmanNode.get_nodeip_list())
        client.submit_job("pre_schedule", pickle.dumps(dat), background=True)
def init():
    # init log
    global g_gearman_client
    global g_para
    global g_conf
    g_conf = ConfigParser.ConfigParser()
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d")
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S ',
        filename='../log/rtb_new.log.' + timestamp,
        filemode='a')
    # init conf
    g_conf.read("../conf/rtb_new.conf")
    # init gearman client
    gearman_list = g_conf.get("gearman", "client").split(',')
    g_gearman_client = GearmanClient(gearman_list)
    g_para = parameter_t()
    g_para.ip = g_conf.get("gearman", "ip")
    g_para.timeout = float(g_conf.get("gearman", "timeout"))
    g_para.svr_name = g_conf.get("gearman", "svr_name")
    return 0
def generate(image, options, force_creation=False):
    simulation = image.simulation
    points = options['points']
    for point in points:
        logging.debug("Point: %d,%d" % (point['x'], point['y']))
    natural_width = int(options['naturalWidth'])
    natural_height = int(options['naturalHeight'])
    image.channel_width_points = json.dumps(points)
    image.image_natural_width = natural_width
    image.image_natural_height = natural_height
    image.save()
    geotiff_image = simulation.aerialmap.filename
    channel_image = simulation.channelmap.filename
    width_image = simulation.channelwidthmap.filename
    if (not os.path.isfile(width_image)) or force_creation == True:
        logging.debug("Channel width map image %s doesn't exist, generating..." % (width_image))
        if image.job_handle and force_creation == False:
            logging.debug("Job handle: %s already exists, not re-queueing" % (image.job_handle))
            return None
        else:
            run_parameters = {
                'channel_image': simulation.channel_image,
                'channel_width_image': simulation.channel_width_image,
                'elevation_map_image': elevation_image
            }
            client = GearmanClient(settings.GEARMAN_SERVERS)
            jobdata = json.dumps(run_parameters)
            jobrequest = client.submit_job('elevation_map', jobdata, background=True)
            simulation.elevation_map_job_handle = jobrequest.gearman_job.handle
            simulation.elevation_map_job_complete = False
            simulation.save()
            return None
    else:
        img = Image.open(width_image)
        return img
def send_request(req):
    new_client = GearmanClient([req.IP])
    s = time.time()
    request_dict = {}
    request_dict["header"] = req.header
    request_dict["request"] = req.request
    if "pack_in" in req.params and req.params["pack_in"] == "0":
        current_request = new_client.submit_job(req.worker, request_dict)
    else:
        current_request = new_client.submit_job(req.worker, msgpack.packb(request_dict))
    if "pack_out" in req.params and req.params["pack_out"] == "0":
        current_result = current_request.result
    else:
        current_result = msgpack.unpackb(current_request.result)
    e = time.time()
    print "using time:%f" % (e - s)
    return current_result
def get_gearman_status(job_handle):
    try:
        # Query gearmand
        client = GearmanClient(settings.GEARMAN_SERVERS)
        client.establish_connection(client.connection_list[0])
        # configure the job to request status for - the last arguments are not needed for status requests
        j = gearman.job.GearmanJob(client.connection_list[0], str(job_handle), None, None, None)
        # create a job request
        jr = gearman.job.GearmanJobRequest(j)
        jr.state = 'CREATED'
        # request the state from gearmand
        res = client.get_job_status(jr)
        # the res structure should now be filled with the status information about the task
        return res
    except:
        print "Unexpected error:", sys.exc_info()[0]
        return -1
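A hedged usage sketch for get_gearman_status above: the handle is assumed to have been stored when a background job was submitted, and the status dict keys ('known', 'running', 'numerator', 'denominator') follow the python-gearman 2.x GearmanJobRequest convention; treat both as assumptions rather than facts from the original snippet.

# job_handle is assumed to come from an earlier background submit_job call.
status_request = get_gearman_status(job_handle)
if status_request != -1:
    status = status_request.status
    print "known=%s running=%s progress=%s/%s" % (
        status.get('known'), status.get('running'),
        status.get('numerator'), status.get('denominator'))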
class TestGearman(unittest.TestCase):
    def setUp(self):
        self.last_exception = (None, None)
        self.worker = GearmanWorker(job_servers)
        self.worker.register_function("echo", echo)
        self.worker.register_function("fail", fail)
        self.worker.register_function("sleep", sleep, timeout=1)

        class Hooks(object):
            @staticmethod
            def start(job):
                pass

            @staticmethod
            def complete(job, res):
                pass

            @staticmethod
            def fail(job, exc):
                self.last_exception = (job.func, exc)

        import thread
        thread.start_new_thread(self.worker.work, tuple(), dict(hooks=Hooks))
        # TODO: Shouldn't use threads.. but we do for now (also, the thread is never terminated)
        self.client = GearmanClient(job_servers)

    def tearDown(self):
        del self.worker
        del self.client

    def testComplete(self):
        self.failUnlessEqual(self.client.do_task(Task("echo", "bar")), 'bar')

    def testFail(self):
        self.failUnlessRaises(self.client.TaskFailed, lambda: self.client.do_task(Task("fail", "bar")))
        # self.failUnlessEqual(self.last_exception[0], "fail")

    def testTimeout(self):
        self.failUnlessEqual(self.client.do_task(Task("sleep", "0.1")), '0.1')
        self.failUnlessRaises(self.client.TaskFailed, lambda: self.client.do_task(Task("sleep", "1.5")))

    def testCall(self):
        self.failUnlessEqual(self.client("echo", "bar"), 'bar')
def generate(simulation_id, force_creation=False):
    simulation = Simulation.objects.get(pk=simulation_id)
    geotiff_image = simulation.aerial_geotiff
    channel_image = simulation.channel_image
    if (not os.path.isfile(channel_image)) or force_creation == True:
        if simulation.channel_tile_job_handle and force_creation == False:
            logging.debug("Job handle: %s already exists, not re-queueing" % (simulation.channel_tile_job_handle))
            return None
        else:
            logging.debug("Channel image %s doesn't exist, generating..." % (channel_image))
            run_parameters = {
                'tile_path': settings.RIVER_TILES_PATH,
                'geotiff_image': geotiff_image,
                'channel_image': channel_image,
                'ortho_tiles': [tile.tile for tile in simulation.get_ortho_tiles()],
                'tile_width': 5000,
                'tile_height': 5000
            }
            client = GearmanClient(settings.GEARMAN_SERVERS)
            jobdata = json.dumps(run_parameters)
            jobrequest = client.submit_job('channel_image', jobdata, background=True)
            simulation.channel_tile_job_handle = jobrequest.gearman_job.handle
            simulation.channel_tile_job_complete = False
            simulation.save()
            return None
    else:
        img = Image.open(channel_image)
        return img
def download(request):
    if "folder" not in request.GET or "title" not in request.GET or "url" not in request.GET:
        return HttpResponse("failed to enqueue, one or more params missing")
    folder = request.GET["folder"]
    name = request.GET["title"]
    video_url = request.GET["url"]
    if folder == '' or name == '' or video_url == '':
        return HttpResponse("failed to enqueue")
    info = Info()
    video_id = info.add_to_queue(folder, video_url, name)
    #outtmpl="%s/%s" % (folder,name)
    folder = "%s/%s" % (settings.DOWNLOAD_PATH, folder)
    d = {'folder': folder, 'name': name, 'url': video_url, 'id': video_id}
    client = GearmanClient(["127.0.0.1"])
    res = client.dispatch_background_task("download", d)
    return HttpResponse("enqueued video successfully")
def stash_file(base64file, filename=None, gearman_client=None, args={}):
    """Accepts file handle from http upload, stashes, creates gearman worker"""
    new_file = ''.join([random.choice(string.letters + string.digits) for i in range(15)])
    if filename == None:
        extension = ".jpg"
    else:
        extension = re.search('\.\w+', filename).group()
    new_path = '%s/%s' % (random.choice(string.ascii_lowercase), random.choice(string.ascii_lowercase))  # two folders
    relative_path_wfile = '%s/%s' % (new_path, new_file)
    #local_path_wfile = '%s/%s%s' % (path,new_file,extension)
    if not gearman_client:
        gearman_client = GearmanClient(options.gearman_servers)
    json_data = {
        'file': new_file,
        'args': args,
        'extension': extension,
        'path': new_path,
        'path_w_file': relative_path_wfile,
        'url': '%sstatic/upload/%s%s' % (options.base_url, relative_path_wfile, extension),
        'image': base64file
    }
    gearman_client.do_task(Task("image_resize", json.dumps(json_data), background=True))
    return relative_path_wfile
class TestGearman(GearmanTestCase):
    def setUp(self):
        self.start_server()
        self.last_exception = (None, None)
        self.worker = GearmanWorker(job_servers)
        self.worker.register_function("echo", echo)
        self.worker.register_function("fail", fail)
        self.worker.register_function("sleep", sleep, timeout=1)
        self.worker.register_class(ObjectWorker())
        self.worker.register_class(ClassWorker())

        class Hooks(object):
            @staticmethod
            def start(job):
                pass

            @staticmethod
            def complete(job, res):
                pass

            @staticmethod
            def fail(job, exc):
                self.last_exception = (job.func, exc)

        import thread
        self.worker_thread = thread.start_new_thread(self.worker.work, tuple(), dict(hooks=Hooks))
        # TODO: Shouldn't use threads.. but we do for now (also, the thread is never terminated)
        self.client = GearmanClient(job_servers)

    def tearDown(self):
        del self.worker
        del self.client
        self.stop_server()

    def testComplete(self):
        self.failUnlessEqual(self.client.do_task(Task("echo", "bar")), 'bar')

    def testFail(self):
        self.failUnlessRaises(self.client.TaskFailed, lambda: self.client.do_task(Task("fail", "bar")))
        # self.failUnlessEqual(self.last_exception[0], "fail")

    def testCompleteAfterFail(self):
        self.failUnlessRaises(self.client.TaskFailed, lambda: self.client.do_task(Task("fail", "bar")))
        self.failUnlessEqual(self.client.do_task(Task("echo", "bar")), 'bar')

    def testTimeout(self):
        self.failUnlessEqual(self.client.do_task(Task("sleep", "0.1")), '0.1')
        self.failUnlessRaises(self.client.TaskFailed, lambda: self.client.do_task(Task("sleep", "1.5")))

    def testCall(self):
        self.failUnlessEqual(self.client("echo", "bar"), 'bar')

    def testObjectWorker(self):
        self.failUnlessEqual(self.client("ObjectWorker.echo", "foo"), "foo")

    def testClassWorker(self):
        self.failUnlessEqual(self.client("ClassWorker.echo", "foo"), "foo")
def __init__(self, host_list=None):
    GearmanClient.__init__(self, host_list=host_list)
#!/usr/bin/env python
import string

from PIL import Image
from gearman import GearmanClient

gearman_client = GearmanClient(['ftester.chinacloudapp.cn:4730'])
# gearman_client = GearmanClient(['172.31.1.92:4730'])

path = './sample_images/tubingen.jpg'
data = open(path, 'rb').read()
ljust = string.ljust('kandinsky_e2_crop512', 100, ' ')
data = ljust + data
gearman_request = gearman_client.submit_job('test', data)
result_data = gearman_request.result
with open('test.jpg', 'w+') as pf:
    pf.write(result_data)
    #pf.write(data[10:])
video_end_parser.add_argument('last_segment_id', type=long, location='form')

# path resolution
root_dir = os.path.dirname(os.path.abspath(__file__))
upload_path = os.path.join(root_dir, DIR_SEGMENT_UPLOADED)
transcode_path = os.path.join(root_dir, DIR_SEGMENT_TRANSCODED)

# ensure the directories exist
if not os.path.exists(upload_path):
    os.makedirs(upload_path)
if not os.path.exists(transcode_path):
    os.makedirs(transcode_path)

# gearman job queue
gm_client = GearmanClient([GEARMAND_HOST_PORT])

# importing pickle
try:
    import cPickle as pickle
except:
    import pickle


class VideoResource(Resource):
    @marshal_with(video_fields)
    def get(self, video_id):
        video = session \
            .query(Video) \
            .filter(Video.video_id == video_id) \
            .first()
def __init__(self, hosts):
    self.client = GearmanClient(hosts)
import sys

from django.conf import settings
from gearman import Task, GearmanWorker, GearmanClient
from gearman.connection import GearmanConnection
from gearman.task import Taskset

workers = dict()
client = settings.GEARMAN_SERVERS and GearmanClient(settings.GEARMAN_SERVERS) or None


def register_worker(id):
    def register(worker):
        workers[id] = worker
        return worker
    return register


def discover_workers():
    if not workers:
        for app in settings.INSTALLED_APPS:
            try:
                module = __import__(app + ".workers")
            except ImportError:
                pass


def create_worker():
    discover_workers()
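A hypothetical example of the register_worker decorator defined above: decorating a function records it in the workers dict under the given id. The id 'echo' and the handler body (including the job.arg attribute from the older gearman Task API) are illustrative assumptions, not part of the original module.

# Illustrative only: registers echo_worker under the id 'echo'.
@register_worker('echo')
def echo_worker(job):
    # assumed old-style gearman job object exposing the submitted argument
    return job.arg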
def send_request(workername, host, request, packType="msgpack"):
    client = GearmanClient(host)
    response = client.submit_job(workername, packRequest(request, packType))
    result = unpackResponse(response.result)
    client.shutdown()
    return result
#!/usr/bin/python
# TODO: Initialize Logging
# TODO: Direct log to file
# TODO: Use Gearman Configuration files
from gearman import GearmanClient

topnrelay = GearmanClient(['localhost:4730'])

# TODO: Use Redis Configuration files
import redis
redis_server = redis.Redis('localhost')

if __name__ == "__main__":
    # TODO: We can have multiple clients by allowing for an extended pattern say "tbp_a*" - <all words to be processed starting with a>
    # TODO: This can be config driven based on the scale that has to be achieved
    tbp_keys = redis_server.keys(pattern='tbp_*')
    for word in tbp_keys:
        # TODO: Most frequent words can be sent into the top N compute queue with higher priority
        # TODO: Log the call
        result = topnrelay.submit_job('topncompute', word, background=True, wait_until_complete=False)
from gearman import GearmanClient
import simplejson

# create a client that will connect to the Gearman server running on
# localhost. The array allows you to specify a set of job servers.
#client = GearmanClient(['impinj-pc-0461'])
client = GearmanClient(['localhost:4730'])

# Submit a synchronous job request to the job server and store the result
print 'Sending job...'

# This runs the "echo" job on the argument "foo"
#request = client.submit_job('reverse', 'hello')
#jenkins_data = {'url':"http://localhost:8080",'jobId':'burnit'}
#jenkins_burnit_status = {'url':"http://localhost:8080",'jobId':'burnit'}
#info_json = "{'url':'baba','job_id':'didi'}"
#request = client.submit_job('jenkins_invoke_job', simplejson.dumps(jenkins_data))
#request = client.submit_job('org.gearman.example.EchoFunction', "mama", poll_timeout=5)
#request = client.submit_job('echo', "mama", poll_timeout=5)
#request = client.submit_job('echo', "mama")
#request = client.submit_job('bravo', "mama")
#print request.result
#request = client.submit_job('reverse', "baba", poll_timeout=5)
#request = client.submit_job('reverse', "baba")
#print request.result
#request = client.submit_job('JenkinsJobStatus', "burnit", poll_timeout=5)
#client.shutdown()
#request = client.submit_job('org.gearman.example.JenkinsInvokeJob', "wet", poll_timeout=5)
#request = client.submit_job('org.gearman.example.JenkinsJobStatus', "burnit", poll_timeout=5)
#!/usr/bin/env python
#coding:utf8
# Author : tuxpy
# Email : [email protected]
# Last modified : 2014-08-29 11:52:16
# Filename : gearman_client.py
# Description :

from gearman import GearmanClient

new_client = GearmanClient(["192.168.8.116:1234"])
fd = open("亮剑.txt", 'r')
line = fd.readline()
request = new_client.submit_job('pinyin', line)
print dir(request)
def send_request(WORKERNAME, HOST, POST, REQUEST):
    client = GearmanClient([HOST + ":" + str(POST)])
    response = client.submit_job(WORKERNAME, packRequest(REQUEST))
    result = unpackResponse(response.result)
    return result
def doWork_packageByPackageInfo(packageInfo):
    client = GearmanClient([GearmanConfig.gearmanConnection])
    data = json.dumps(packageInfo)
    request = client.submit_job(JobList.Job_package, data, wait_until_complete=False)
    pass
from gearman import GearmanClient
import simplejson
import uuid
import time

# create a client that will connect to the Gearman server running on
# localhost. The array allows you to specify a set of job servers.
#client = GearmanClient(['localhost:4730'])
client = GearmanClient(['15.185.117.66:4730'])

# Submit a synchronous job request to the job server and store the result
print 'Sending job...'

build_id = uuid.uuid4().hex
print build_id
jenkins_build_params = {'uuid': build_id, 'param2': "true", 'param3': 'bingo'}
request = client.submit_job('build:kiwi:centos', simplejson.dumps(jenkins_build_params),
                            poll_timeout=60, unique=build_id)
print request.result
print 'Work complete with state %s' % request.state
import time

from gearman import GearmanClient, Task

client = GearmanClient(["127.0.0.1"])

for i in range(5):
    client.dispatch_background_task('speak', i)
    print 'Dispatched %d' % i
    time.sleep(1)
def os_install(server, install_param):
    gearman_client = GearmanClient(['%s:7070' % server])
    gearman_client.submit_job('install', json.dumps(install_param), background=True)
    return True
#!/usr/bin/env python2.6
#!/usr/bin/env python2.7
# coding=utf-8
from gearman import GearmanClient

gearman_client = GearmanClient(['127.0.0.1:4730'])
gearman_request = gearman_client.submit_job('echo', 'test gearman')
result_data = gearman_request.result
print result_data
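A minimal worker sketch to pair with the echo client above, assuming the python-gearman 2.x worker API (GearmanWorker.register_task / work); the server address simply mirrors the client snippet and is otherwise an assumption.

from gearman import GearmanWorker

# Serve the 'echo' task that the client snippet above submits.
gm_worker = GearmanWorker(['127.0.0.1:4730'])

def task_listener_echo(gearman_worker, gearman_job):
    # Return the submitted payload unchanged.
    return gearman_job.data

gm_worker.register_task('echo', task_listener_echo)
gm_worker.work()  # blocks, handling jobs until interrupted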
#!/usr/bin/env python
#coding:utf8
# Author : tuxpy
# Email : [email protected]
# Last modified : 2014-08-28 16:46:33
# Filename : tw_gearman.py
# Description :

from twisted.internet import reactor
from twisted.web.server import Site
from twisted.web.resource import Resource, ErrorPage
from template import render
from gearman import GearmanClient

temp = render('temp/')
new_client = GearmanClient(["192.168.8.116:1234"])


class PinYin(Resource):
    def getChild(self, name, request):
        return self

    def render_GET(self, request):
        return temp.pinyin()

    def render_POST(self, request):
        line = request.args["words"][0]
        return new_client.submit_job('pinyin', line).result


site = Site(PinYin())
def set_new_file(video_id, new_id, filepath):
    '''
    Returns a tuple with the first value T/F, and the second, if False,
    the relevant message.
    '''
    from helpers import getVideoInfo
    from PIL import Image
    from shutil import move
    up = {'id': new_id, 'filepath': filepath, 'pid': video_id}
    i = up['id']
    dupe = assets.find_one({'@graph.ma:locator': {'$elemMatch': {'@id': i}}})
    mp4 = (unicode(config.MEDIA_DIRECTORY) + i + '.mp4').encode('utf-8')
    webm = (unicode(config.MEDIA_DIRECTORY) + i + '.webm').encode('utf-8')
    if path.isfile(webm) or path.isfile(mp4) or dupe is not None:
        return (False, "That file (%s) already exists; try another unique ID." % i)
    filepath = unicode(config.INGEST_DIRECTORY + up['filepath'])
    new_file = unicode(config.MEDIA_DIRECTORY + up['id'] + ".mp4")
    md = getVideoInfo(filepath.encode('utf-8'))
    poster = config.POSTERS_DIRECTORY + "%s.jpg" % (up["id"])
    thumb = config.POSTERS_DIRECTORY + "%s_thumb.jpg" % (up["id"])
    move(filepath.encode('utf-8'), new_file.encode('utf-8'))
    assets.update({"_id": up["pid"]}, {"$set": {
        "@graph.ma:frameRate": float(md["framerate"]),
        "@graph.ma:averageBitRate": int(float(md["bitrate"])),
        "@graph.ma:frameWidth": int(md["width"]),
        "@graph.ma:frameHeight": int(md["height"]),
        "@graph.ma:duration": int(round(float(md["duration"]))) / 60,
        "@graph.ma:locator": [
            {"@id": up["id"],
             "ma:hasFormat": "video/mp4",
             "ma:hasCompression": {"@id": "http://www.freebase.com/view/en/h_264_mpeg_4_avc", "name": "avc.42E01E"}},
            {"@id": up["id"],
             "ma:hasFormat": "video/webm",
             "ma:hasCompression": {"@id": "http://www.freebase.com/m/0c02yk5", "name": "vp8.0"}}
        ]
    }})
    imgcmd = "avconv -i '%s' -q:v 1 -r 1 -t 00:00:01 -ss 00:00:30 -f image2 '%s'" % (new_file, poster)
    system(imgcmd.encode('utf-8'))
    chmod(poster, 0775)
    im = Image.open(poster)
    im.thumbnail((160, 90))
    im.save(thumb)
    chmod(thumb, 0775)
    if not app.config.get('TESTING'):
        from gearman import GearmanClient
        client = GearmanClient(config.GEARMAN_SERVERS)
        client.submit_job("generate_webm", str(up["id"]))
    else:
        from ingest import generate_webm
        result = generate_webm(file_id=up['id'])
        if result == "ERROR":
            raise Exception("Could not convert media file.")
    return (True,)
def get_urls(db):
    urls = []
    con = sqlite3.connect(db)
    for tb in CATES:
        sql = "select * from %s limit %d" % (tb, TRAIN_URLS)
        rows = con.execute(sql)
        urls.extend(["%s\t%s" % (row[0].encode("utf-8"), tb) for row in rows])
        pass
    return urls


if __name__ == "__main__":
    urls = get_urls(URLS_DB)
    client = GearmanClient(["10.61.0.145"])
    tasks = Taskset()
    TASK_URLS_NUM = 100
    # distribute tasks
    i = 0
    while i < len(urls):
        sub_urls = urls[i:i + TASK_URLS_NUM]
        workload = "\n".join(sub_urls)
        t = Task1("crawl", workload, str(i), timeout=TASK1_TIMEOUT, retry_count=1)
        tasks.add(t)
        print "add task:%s" % t.uniq
        i += TASK_URLS_NUM
    # test
    pass
    # 0. init database for return result from worker
def os_install_action(install_param):
    gearman_client = GearmanClient(['10.1.102.50:7070'])
    gearman_client.submit_job('rsync', json.dumps(install_param), background=True)
    return True
from gearman import GearmanClient

new_client = GearmanClient(['www.aispring.top:4730'])
current_request = new_client.submit_job('echo', 'foo')
new_result = current_request.result
print new_result
#!/usr/bin/python
import time, os, sys

from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
from gearman import GearmanClient

# TODO: Initialize Logging
# TODO: Direct log to file
# TODO: Gearman configuration file to be used
relay = GearmanClient(['localhost:4730'])


class MyHandler(FileSystemEventHandler):
    '''
    Extending the base FileSystemEventHandler to relay to Gearman workers
    '''
    def on_created(self, event):
        print event.src_path, event.is_directory, event.event_type
        # TODO: Write above to log file
        if not (event.is_directory):
            # TODO: Write relayed to log file
            result = relay.submit_job('invindex', event.src_path, background=True, wait_until_complete=False)
            # TODO: Write completed to log file


if __name__ == "__main__":
    # TODO: Daemonize