def pushSocket(queue):
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    serverAddr = ('0.0.0.0', 28282)
    sock.bind(serverAddr)

    send_thread = SendThread(sock, queue)
    send_thread.start()

    gm_client = GearmanClient(['localhost:4730'])
    while True:
        msg, addr = sock.recvfrom(2048)
        if not msg:
            continue
        #print "received:", msg, "from", addr
        try:
            data = json.loads(msg)
            topic = str(data['topic'])
            data['address'] = addr[0] + ':' + str(addr[1])
        except (ValueError, KeyError):
            # skip packets that are not valid JSON or lack a topic
            continue
        gm_request = gm_client.submit_job(topic, json.dumps(data),
                                          background=False,
                                          wait_until_complete=False)
    sock.close()
def analysis_address(row):
    out_row = []
    input_id = []
    input_addr = row[1].encode("utf-8")
    api_output_id = []
    api_output_addr = []

    client = GearmanClient([GEARMAN_API])
    job_request = client.submit_job(FUNCTION, input_addr)
    loaded_json = json.loads(job_request.result)
    for addr in loaded_json:
        api_output_id.append(addr['id'])
        api_output_addr.append(addr['string'])

    if row[6] != '':
        input_id = row[6].strip().split(" ")
        input_id = list(map(int, input_id))

    compare_list = set(input_id) ^ set(api_output_id)
    different = len(compare_list)

    str_api_output_addr = ', '.join(api_output_addr)
    str_api_output_id = ' '.join(str(e) for e in api_output_id)

    out_row = [row[0], row[1], row[2], str_api_output_addr, row[6],
               str_api_output_id, different]
    return out_row
def init():
    # init log
    global g_gearman_client
    global g_para
    global g_conf

    g_conf = ConfigParser.ConfigParser()
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d")
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S ',
        filename='../log/rtb_new.log.' + timestamp,
        filemode='a')

    # init conf
    g_conf.read("../conf/rtb_new.conf")

    # init gearman client
    gearman_list = g_conf.get("gearman", "client").split(',')
    g_gearman_client = GearmanClient(gearman_list)

    g_para = parameter_t()
    g_para.ip = g_conf.get("gearman", "ip")
    g_para.timeout = float(g_conf.get("gearman", "timeout"))
    g_para.svr_name = g_conf.get("gearman", "svr_name")
    return 0
def setUp(self):
    self.start_server()
    self.last_exception = (None, None)

    self.worker = GearmanWorker(job_servers)
    self.worker.register_function("echo", echo)
    self.worker.register_function("fail", fail)
    self.worker.register_function("sleep", sleep, timeout=1)
    self.worker.register_class(ObjectWorker())
    self.worker.register_class(ClassWorker())

    class Hooks(object):
        @staticmethod
        def start(job):
            pass

        @staticmethod
        def complete(job, res):
            pass

        @staticmethod
        def fail(job, exc):
            self.last_exception = (job.func, exc)

    import thread
    # TODO: Shouldn't use threads.. but we do for now (also, the thread is never terminated)
    self.worker_thread = thread.start_new_thread(self.worker.work, tuple(),
                                                 dict(hooks=Hooks))

    self.client = GearmanClient(job_servers)
def download(request):
    if "folder" not in request.GET or "title" not in request.GET or "url" not in request.GET:
        return HttpResponse("failed to enqueue, one or more params missing")

    folder = request.GET["folder"]
    name = request.GET["title"]
    video_url = request.GET["url"]
    if folder == '' or name == '' or video_url == '':
        return HttpResponse("failed to enqueue")

    info = Info()
    video_id = info.add_to_queue(folder, video_url, name)
    #outtmpl="%s/%s" % (folder,name)
    folder = "%s/%s" % (settings.DOWNLOAD_PATH, folder)
    d = {'folder': folder, 'name': name, 'url': video_url, 'id': video_id}

    client = GearmanClient(["127.0.0.1"])
    res = client.dispatch_background_task("download", d)
    return HttpResponse("successfully enqueued video")
def set_new_file(video_id, new_id, filepath):
    '''
    Returns a tuple with the first value T/F, and the second, if False,
    the relevant message.
    '''
    from helpers import getVideoInfo
    from PIL import Image
    from shutil import move

    up = {'id': new_id, 'filepath': filepath, 'pid': video_id}
    i = up['id']
    dupe = assets.find_one({'@graph.ma:locator': {'$elemMatch': {'@id': i}}})
    mp4 = (unicode(config.MEDIA_DIRECTORY) + i + '.mp4').encode('utf-8')
    webm = (unicode(config.MEDIA_DIRECTORY) + i + '.webm').encode('utf-8')
    if path.isfile(webm) or path.isfile(mp4) or dupe is not None:
        return (False, "That file (%s) already exists; try another unique ID." % i)

    filepath = unicode(config.INGEST_DIRECTORY + up['filepath'])
    new_file = unicode(config.MEDIA_DIRECTORY + up['id'] + ".mp4")
    md = getVideoInfo(filepath.encode('utf-8'))
    poster = config.POSTERS_DIRECTORY + "%s.jpg" % (up["id"])
    thumb = config.POSTERS_DIRECTORY + "%s_thumb.jpg" % (up["id"])
    move(filepath.encode('utf-8'), new_file.encode('utf-8'))

    # store the extracted media metadata and the two locator entries
    assets.update({"_id": up["pid"]}, {"$set": {
        "@graph.ma:frameRate": float(md["framerate"]),
        "@graph.ma:averageBitRate": int(float(md["bitrate"])),
        "@graph.ma:frameWidth": int(md["width"]),
        "@graph.ma:frameHeight": int(md["height"]),
        "@graph.ma:duration": int(round(float(md["duration"]))) / 60,
        "@graph.ma:locator": [
            {"@id": up["id"],
             "ma:hasFormat": "video/mp4",
             "ma:hasCompression": {"@id": "http://www.freebase.com/view/en/h_264_mpeg_4_avc",
                                   "name": "avc.42E01E"}},
            {"@id": up["id"],
             "ma:hasFormat": "video/webm",
             "ma:hasCompression": {"@id": "http://www.freebase.com/m/0c02yk5",
                                   "name": "vp8.0"}}
        ]}})

    # grab a poster frame and derive a thumbnail from it
    imgcmd = "avconv -i '%s' -q:v 1 -r 1 -t 00:00:01 -ss 00:00:30 -f image2 '%s'" % (new_file, poster)
    system(imgcmd.encode('utf-8'))
    chmod(poster, 0775)
    im = Image.open(poster)
    im.thumbnail((160, 90))
    im.save(thumb)
    chmod(thumb, 0775)

    # queue the webm transcode through gearman, or run it inline under test
    if not app.config.get('TESTING'):
        from gearman import GearmanClient
        client = GearmanClient(config.GEARMAN_SERVERS)
        client.submit_job("generate_webm", str(up["id"]))
    else:
        from ingest import generate_webm
        result = generate_webm(file_id=up['id'])
        if result == "ERROR":
            raise Exception("Could not convert media file.")

    return (True,)
import sys

from django.conf import settings
from gearman import Task, GearmanWorker, GearmanClient
from gearman.connection import GearmanConnection
from gearman.task import Taskset

workers = dict()
client = settings.GEARMAN_SERVERS and GearmanClient(settings.GEARMAN_SERVERS) or None


def register_worker(id):
    def register(worker):
        workers[id] = worker
        return worker
    return register


def discover_workers():
    if not workers:
        for app in settings.INSTALLED_APPS:
            try:
                module = __import__(app + ".workers")
            except ImportError:
                pass


def create_worker():
    discover_workers()
#!/usr/bin/env python2.6
# coding=utf-8

from gearman import GearmanClient

gearman_client = GearmanClient(['127.0.0.1:4730'])
gearman_request = gearman_client.submit_job('echo', 'test gearman')
result_data = gearman_request.result
print result_data
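# For reference only: the 'echo' client above assumes a worker registered under the
# same task name. A minimal sketch of such a worker with the python-gearman API is
# shown below; the host/port and the echo behaviour are assumptions, since the
# actual worker is not part of this collection.
from gearman import GearmanWorker


def task_echo(gearman_worker, gearman_job):
    # return the submitted payload unchanged
    return gearman_job.data


echo_worker = GearmanWorker(['127.0.0.1:4730'])
echo_worker.register_task('echo', task_echo)
echo_worker.work()  # blocks and serves jobs until interrupted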
def send_request(WORKERNAME, HOST, POST, REQUEST):
    client = GearmanClient([HOST + ":" + str(POST)])
    response = client.submit_job(WORKERNAME, packRequest(REQUEST))
    result = unpackResponse(response.result)
    return result
def send_request(workername, host, request, packType="msgpack"):
    client = GearmanClient(host)
    response = client.submit_job(workername, packRequest(request, packType))
    result = unpackResponse(response.result)
    client.shutdown()
    return result
video_end_parser.add_argument('last_segment_id', type=long, location='form')

# path resolution
root_dir = os.path.dirname(os.path.abspath(__file__))
upload_path = os.path.join(root_dir, DIR_SEGMENT_UPLOADED)
transcode_path = os.path.join(root_dir, DIR_SEGMENT_TRANSCODED)

# ensure the directories exist
if not os.path.exists(upload_path):
    os.makedirs(upload_path)
if not os.path.exists(transcode_path):
    os.makedirs(transcode_path)

# gearman job queue
gm_client = GearmanClient([GEARMAND_HOST_PORT])

# importing pickle (prefer the faster cPickle where available)
try:
    import cPickle as pickle
except ImportError:
    import pickle


class VideoResource(Resource):
    @marshal_with(video_fields)
    def get(self, video_id):
        video = session \
            .query(Video) \
            .filter(Video.video_id == video_id) \
            .first()
#!/usr/bin/env python
# coding:utf8
# Author : tuxpy
# Email : [email protected]
# Last modified : 2014-08-28 16:46:33
# Filename : tw_gearman.py
# Description :

from twisted.internet import reactor
from twisted.web.server import Site
from twisted.web.resource import Resource, ErrorPage
from template import render
from gearman import GearmanClient

temp = render('temp/')
new_client = GearmanClient(["192.168.8.116:1234"])


class PinYin(Resource):
    def getChild(self, name, request):
        return self

    def render_GET(self, request):
        return temp.pinyin()

    def render_POST(self, request):
        line = request.args["words"][0]
        return new_client.submit_job('pinyin', line).result


site = Site(PinYin())
from gearman import GearmanClient

new_client = GearmanClient(['www.aispring.top:4730'])
current_request = new_client.submit_job('echo', 'foo')
new_result = current_request.result
print new_result
#!/usr/bin/python
# TODO: Initialize Logging
# TODO: Direct log to file
# TODO: Use Gearman Configuration files
from gearman import GearmanClient
topnrelay = GearmanClient(['localhost:4730'])

# TODO: Use Redis Configuration files
import redis
redis_server = redis.Redis('localhost')

if __name__ == "__main__":
    # TODO: We can have multiple clients by allowing for an extended pattern, say "tbp_a*" - <all words to be processed starting with a>
    # TODO: This can be config driven based on the scale that has to be achieved
    tbp_keys = redis_server.keys(pattern='tbp_*')
    for word in tbp_keys:
        # TODO: Most frequent words can be sent into the top N compute queue with higher priority
        # TODO: Log the call
        result = topnrelay.submit_job('topncompute', word, background=True,
                                      wait_until_complete=False)
from gearman import GearmanClient
import simplejson

# create a client that will connect to the Gearman server running on
# localhost. The array allows you to specify a set of job servers.
#client = GearmanClient(['impinj-pc-0461'])
client = GearmanClient(['localhost:4730'])

# Submit a synchronous job request to the job server and store the result.
print 'Sending job...'

# This runs the "echo" job on the argument "foo"
#request = client.submit_job('reverse', 'hello')
#jenkins_data = {'url': "http://localhost:8080", 'jobId': 'burnit'}
#jenkins_burnit_status = {'url': "http://localhost:8080", 'jobId': 'burnit'}
#info_json = "{'url':'baba','job_id':'didi'}"
#request = client.submit_job('jenkins_invoke_job', simplejson.dumps(jenkins_data))
#request = client.submit_job('org.gearman.example.EchoFunction', "mama", poll_timeout=5)
#request = client.submit_job('echo', "mama", poll_timeout=5)
#request = client.submit_job('echo', "mama")
#request = client.submit_job('bravo', "mama")
#print request.result
#request = client.submit_job('reverse', "baba", poll_timeout=5)
#request = client.submit_job('reverse', "baba")
#print request.result
#request = client.submit_job('JenkinsJobStatus', "burnit", poll_timeout=5)
#client.shutdown()
#request = client.submit_job('org.gearman.example.JenkinsInvokeJob', "wet", poll_timeout=5)
#request = client.submit_job('org.gearman.example.JenkinsJobStatus', "burnit", poll_timeout=5)
def get_urls(db):
    urls = []
    con = sqlite3.connect(db)
    for tb in CATES:
        sql = 'select * from %s limit %d' % (tb, TRAIN_URLS)
        rows = con.execute(sql)
        urls.extend(['%s\t%s' % (row[0].encode('utf-8'), tb) for row in rows])
    return urls


if __name__ == '__main__':
    urls = get_urls(URLS_DB)

    client = GearmanClient(['10.61.0.145'])
    tasks = Taskset()
    TASK_URLS_NUM = 100

    # distribute tasks in batches of TASK_URLS_NUM urls
    i = 0
    while i < len(urls):
        sub_urls = urls[i:i + TASK_URLS_NUM]
        workload = '\n'.join(sub_urls)
        t = Task1('crawl', workload, str(i), timeout=TASK1_TIMEOUT, retry_count=1)
        tasks.add(t)
        print "add task:%s" % t.uniq
        i += TASK_URLS_NUM
def __init__(self, hosts):
    self.client = GearmanClient(hosts)