Example 1
def videoCreationBatch():
    from auth import get_user, is_superuser
    if not is_superuser():
        return action_401()
    if request.method == "GET":
        chdir(config.INGEST_DIRECTORY)
        files = [f for f in listdir(getcwd()) if f[0] != '.']
        return json.dumps(files)
    else:
        from PIL import Image
        from shutil import move
        from helpers import getVideoInfo
        packet = request.json
        for up in packet:
            filepath = unicode(config.INGEST_DIRECTORY + up['filepath'])
            new_file = unicode(config.MEDIA_DIRECTORY + up['id'] + ".mp4")
            if path.isfile(new_file):
                return bundle_400("That file already exists; try another unique ID.")
            if path.isfile(filepath.encode('utf-8')):
                md = getVideoInfo(filepath.encode('utf-8'))
                poster = config.POSTERS_DIRECTORY + "%s.jpg" % (up["id"])
                thumb = config.POSTERS_DIRECTORY + "%s_thumb.jpg" % (up["id"])
                move(filepath.encode('utf-8'), new_file.encode('utf-8'))
                assets.update({"_id":up["pid"]},{"$set":{
                    "@graph.ma:frameRate":float(md["framerate"]),
                    "@graph.ma:averageBitRate":int(float(md["bitrate"])),
                    "@graph.ma:frameWidth":int(md["width"]),
                    "@graph.ma:frameHeight":int(md["height"]),
                    "@graph.ma:duration":int( round(float(md["duration"])) )/60,
                    "@graph.ma:locator": [
                        {
                            "@id": up["id"],
                            "ma:hasFormat": "video/mp4",
                            "ma:hasCompression": {"@id":"http://www.freebase.com/view/en/h_264_mpeg_4_avc","name": "avc.42E01E"}
                        },
                        {
                            "@id": up["id"],
                            "ma:hasFormat": "video/webm",
                            "ma:hasCompression": {"@id":"http://www.freebase.com/m/0c02yk5","name":"vp8.0"}
                        }
                    ]
                }})
                imgcmd = "avconv -i '%s' -q:v 1 -r 1 -t 00:00:01 -ss 00:00:30 -f image2 '%s'" % (new_file,poster)
                system(imgcmd.encode('utf-8'))
                chmod(poster,0775)
                im=Image.open(poster)
                im.thumbnail((160,90))
                im.save(thumb)
                chmod(thumb,0775)
                
                if not app.config.get('TESTING'):
                    from gearman import GearmanClient
                    client = GearmanClient(config.GEARMAN_SERVERS)
                    client.submit_job("generate_webm", str(up["id"]))
                else:
                    from ingest import generate_webm
                    result = generate_webm(file_id=up['id'])
                    if result == "ERROR":
                      raise Exception("Could not convert media file.")
	return "Success"
Example 2
    def obj_create(self, bundle, **kwargs):
        bundle.obj = Task(procedure_url=bundle.data["ordered_tasks"][0],
                          input_dataset=bundle.data["input_dataset"],
                          output_dataset=bundle.data["output_dataset"])
        
        bundle.obj.save()
        
        parent_task = bundle.obj
        for t_url in bundle.data["ordered_tasks"][1:]:
            if "aggregator" in t_url:
                continue
            temp_task = Task(procedure_url=t_url, parent=parent_task,
                                input_dataset=bundle.data["output_dataset"], output_dataset=bundle.data["output_dataset"])
            temp_task.save()
            parent_task = temp_task
            #statsd.gauge('outstanding_tasks', 1, delta=True)

        #statsd.gauge('outstanding_tasks', 1, delta=True)

        #now that we've created the dependency chain, we will schedule the first task
        dat = {}
        dat["task"] = bundle.data["ordered_tasks"][0]
        dat["task_id"] = bundle.obj.id
        dat["output_dataset"] = bundle.data["output_dataset"]
        dat["input_dataset"] = bundle.data["input_dataset"]
        dat["cassandra_nodes"] = CassandraNode.get_nodeip_list()
        client = GearmanClient(GearmanNode.get_nodeip_list())
        client.submit_job("pre_schedule", pickle.dumps(dat),background=True)
        
        return bundle.obj
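
The worker that consumes this "pre_schedule" job appears in Example 33 below: it unpickles job.data and fans the task out as row-level Gearman jobs.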
Example 3
def run(simulation, run):
	rivers = simulation.rivers.all()
	start_point = simulation.start_point
	end_point = simulation.end_point
	start_elevation = usgs_elevation(start_point.x, start_point.y)
	end_elevation = usgs_elevation(end_point.x, end_point.y)
	river_length = 0 
	for river in rivers:
		river_length += river.geom.length

	# This is dumb, we need to do a proper calculation but for now this is good enough
	river_length_ft = river_length * 69.1 * 5280
	number_of_cross_sections = 1
	upstream_width = 30
	downstream_width = 30

	distance_ft = start_point.distance(end_point) * 69.1 * 5280

	channels = [
		{
			'length' : distance_ft, 
			'cross_sections' : number_of_cross_sections,
			'start_elevation' : start_elevation, 
			'end_elevation' : end_elevation,
			'upstream_width' : upstream_width, 
			'downstream_width': downstream_width
		},
	]

	model_parameters = {
		'maxtimesteps' : 80, 
		'time_step': 300, 
		'time_weight': 0.6, 
		'amplitude': 1999.5, 
		'period': 3.33, 
		'phase_angle': 1.67, 
		'start_time': 0.0, 
		'end_time': 1.667
	}

	run_parameters = {
		'channels': channels, 
		'model_parameters': model_parameters,
		'simulation_id': simulation.id, 
		'run_id': run.id
	}
	
	client = GearmanClient(settings.GEARMAN_SERVERS)
	jobdata = json.dumps(run_parameters)
	client.submit_job('fourpt', jobdata, background=True)

	return True
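
Because the 'fourpt' job is submitted with background=True, submit_job returns as soon as the server queues it and request.result is never populated. A minimal sketch of checking on such a job afterwards, assuming python-gearman's get_job_status:

request = client.submit_job('fourpt', jobdata, background=True)
# poll the server; the returned request carries a status dict
request = client.get_job_status(request)
print request.status['known'], request.status['running']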
Example 4
    def obj_create(self, bundle, **kwargs):
        #raise Exception(bundle.data)
        obj = Task.objects.get(id=bundle.data["id"])
        new_count = 0
        try:
            #obj.tasklet_count += bundle.data["increment"]
            #statsd.gauge('outstanding_tasks', bundle.data["increment"], delta=True)
            new_count = obj.tasklet_count + bundle.data["increment"]
        except KeyError:
            try:
                #obj.tasklet_count -= bundle.data["decrement"]
                #statsd.gauge('outstanding_tasks', -bundle.data["decrement"], delta=True)
                new_count = obj.tasklet_count - bundle.data["decrement"]

            except KeyError:
                pass
        if  new_count <= 0 and obj.tasklet_count == 0:
            obj.tasklet_count = 0
            obj.save()
            #we already scheduled the next procedure
            return
        obj.tasklet_count = new_count
        obj.save()

        
        if obj.tasklet_count == 0:
            obj.done = True
            obj.save()
            #statsd.gauge('outstanding_tasks', -1, delta=True)
            try:
                new_task = obj.task_set.all()[0]
                new_dataset = DatasetMetaResource().get_via_uri(new_task.output_dataset, bundle.request)
                old_dataset = DatasetMetaResource().get_via_uri(new_task.input_dataset, bundle.request)
                new_dataset.highest_ts = old_dataset.highest_ts
                new_dataset.lowest_ts = old_dataset.lowest_ts
                new_dataset.save()

                
            except IndexError:
                #no more tasks to schedule
                return
            dat = {}
            dat["task"] = new_task.procedure_url
            dat["task_id"] = new_task.id
            dat["output_dataset"] = new_task.output_dataset#+"?no_points=True"
            dat["input_dataset"] = new_task.input_dataset#+"?no_points=True"
            dat["cassandra_nodes"] = CassandraNode.get_nodeip_list()
            client = GearmanClient(GearmanNode.get_nodeip_list())
            client.submit_job("pre_schedule", pickle.dumps(dat),background=True) 
Example 5
def analysis_address(row):
    out_row = []

    input_id = []
    input_addr = row[1].encode("utf-8")
    api_output_id = []
    api_output_addr = []

    client = GearmanClient([GEARMAN_API])
    job_request = client.submit_job(FUNCTION, input_addr)

    loaded_json = json.loads(job_request.result)
    for addr in loaded_json:
        api_output_id.append(addr['id'])
        api_output_addr.append(addr['string'])
    if (row[6] != ''):
        input_id = row[6].strip().split(" ")
        input_id = list(map(int, input_id))
    compare_list = set(input_id) ^ set(api_output_id)
    different = len(compare_list)
    str_api_output_addr = ', '.join(api_output_addr)
    str_api_output_id = ' '.join(str(e) for e in api_output_id)
    out_row = [
        row[0], row[1], row[2], str_api_output_addr, row[6], str_api_output_id,
        different
    ]

    return out_row
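
A hypothetical driver for the function above; the file names and the assumption that each row has at least seven columns (row[6] holding the expected ids) are illustrative only, while GEARMAN_API and FUNCTION are module-level constants in the original:

import csv

# Hypothetical usage: run each CSV row through analysis_address and write
# the comparison columns next to the originals.
with open('addresses.csv', 'rb') as fin, open('compared.csv', 'wb') as fout:
    writer = csv.writer(fout)
    for row in csv.reader(fin):
        writer.writerow(analysis_address(row))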
Example 6
    def deal(self):
        self.write('work received<br>')
        new_client = GearmanClient(['192.168.5.41:4730'])
        current_request = new_client.submit_job('task_kanjia', 'heal the world')
        new_result = current_request.result
        print new_result
        self.write('work finished')
Example 7
def pushSocket(queue):
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    serverAddr = ('0.0.0.0', 28282)
    sock.bind(serverAddr)

    send_thread = SendThread(sock, queue)
    send_thread.start()

    gm_client = GearmanClient(['localhost:4730'])

    while True:
        msg, addr = sock.recvfrom(2048)
        if not msg:
            continue
        #print "recevied:", msg, "from", addr

        try:
            data = json.loads(msg)
            topic = str(data['topic'])
            data['address'] = addr[0] + ':' + str(addr[1])
        except (ValueError, KeyError, TypeError):
            continue

        # fire-and-forget: hand the job to the server in the background rather
        # than leaving an unread foreground request behind on every loop
        gm_request = gm_client.submit_job(topic, json.dumps(data), background=True)

    sock.close()
Example 8
    def deal(self):
        self.write('work received<br>')
        new_client = GearmanClient(['192.168.5.41:4730'])
        current_request = new_client.submit_job('task_city', 'heal the world',
                                                wait_until_complete=False)
        # .result is only populated once the job finishes, so wait for
        # completion before reading it
        new_client.wait_until_jobs_completed([current_request])
        new_result = current_request.result
        print new_result
        self.write('work finished')
Example 9
def send_request(req):
    new_client = GearmanClient([req.IP])

    s = time.time()
    request_dict = {}
    request_dict["header"] = req.header
    request_dict["request"] = req.request
    if "pack_in" in req.params and req.params["pack_in"] == "0":
        current_request = new_client.submit_job(req.worker, request_dict)
    else:
        current_request = new_client.submit_job(req.worker, msgpack.packb(request_dict))

    if "pack_out" in req.params and req.params["pack_out"] == "0":
        current_result = current_request.result
    else:
        current_result = msgpack.unpackb(current_request.result)
    e = time.time()
    print "using time:%f" % (e-s)

    return current_result
Example 10
    def submit_job(self, force = False):
        if(self.job_handle and force == False):
            logging.debug("That job's already been submitted.")
        else: 
            logging.debug("Submitting job to generate %s" % (self.filename))
            client = GearmanClient(settings.GEARMAN_SERVERS)
            jobdata = json.dumps(self.get_run_parameters())
            jobrequest = client.submit_job(self.job_queue, jobdata, background=True)

            self.job_handle = jobrequest.gearman_job.handle
            self.job_complete = False
            self.save()
Example 11
def doWork_fetchDependencyInfo(params):
    """
        params = {
            'projectId':projectId,
            'projectPath':projectInfo['projectPath'],
            'appName':data['appName'],
            'dependencyType':1,
        }
    """
    client = GearmanClient([GearmanConfig.gearmanConnection])
    data = json.dumps(params)
    request = client.submit_job(JobList.Job_fetchDependencyInfo, data, wait_until_complete=True)
    return request.result
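
With wait_until_complete=True the call blocks until the worker finishes, so request.result is already populated when submit_job returns.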
Example 12
def generate(image, options, force_creation = False):
  simulation = image.simulation

  points = options['points']
  for point in points:
    logging.debug("Point: %d,%d" % (point['x'], point['y']))
  natural_width = int(options['naturalWidth'])
  natural_height = int(options['naturalHeight'])
  image.channel_width_points = json.dumps(points)
  image.image_natural_width = natural_width
  image.image_natural_height = natural_height
  image.save()

  geotiff_image = simulation.aerialmap.filename
  channel_image = simulation.channelmap.filename
  width_image = simulation.channelwidthmap.filename

  if (not os.path.isfile(width_image)) or force_creation:
    logging.debug("Channel width map image %s doesn't exist, generating..." % (width_image))

    if image.job_handle and force_creation == False:
      logging.debug("Job handle: %s already exists, not re-queueing" % (image.job_handle))
      return None
    else:
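        # NOTE: elevation_image is undefined in this snippet; the attribute
        # below is only an assumed source for it, mirroring the
        # simulation.channelwidthmap.filename pattern used above.
        elevation_image = simulation.elevationmap.filename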
        run_parameters = {
           'channel_image' : simulation.channel_image,
           'channel_width_image' : simulation.channel_width_image,
           'elevation_map_image' : elevation_image
        }

        client = GearmanClient(settings.GEARMAN_SERVERS)
        jobdata = json.dumps(run_parameters)
        jobrequest = client.submit_job('elevation_map', jobdata, background=True)
            
        simulation.elevation_map_job_handle = jobrequest.gearman_job.handle
        simulation.elevation_map_job_complete = False
        simulation.save()

        return None
  else:
    img = Image.open(width_image)
    return img
Example 13
def generate(simulation_id, force_creation = False):
  simulation = Simulation.objects.get(pk = simulation_id)

  geotiff_image = simulation.aerial_geotiff
  channel_image = simulation.channel_image

      
  if (not os.path.isfile(channel_image)) or force_creation:

    if simulation.channel_tile_job_handle and force_creation == False:
        logging.debug("Job handle: %s already exists, not re-queueing" % (simulation.channel_tile_job_handle)) 
        return None
    else:
        logging.debug("Channel image %s doesn't exist, generating..." % (channel_image))

        run_parameters = {
          'tile_path': settings.RIVER_TILES_PATH,
          'geotiff_image': geotiff_image, 
          'channel_image': channel_image,
          'ortho_tiles': [tile.tile for tile in simulation.get_ortho_tiles()],
          'tile_width': 5000, 
          'tile_height': 5000 
        }

        client = GearmanClient(settings.GEARMAN_SERVERS)
        jobdata = json.dumps(run_parameters)
        jobrequest = client.submit_job('channel_image', jobdata, background=True)

        simulation.channel_tile_job_handle = jobrequest.gearman_job.handle
        simulation.channel_tile_job_complete = False
        simulation.save()

        return None

  else:  
    img = Image.open(channel_image)
    return img
Example 14
#!/usr/bin/python
# TODO: Initialize Logging
# TODO:  Direct log to file
# TODO: Use Gearman Configuration files
from gearman import GearmanClient

topnrelay = GearmanClient(['localhost:4730'])

# TODO: Use Redis Configuration files
import redis

redis_server = redis.Redis('localhost')

if __name__ == "__main__":
    # TODO: We can have multiple clients by allowing for an extended pattern say "tbp_a*" - <all words to be processed starting with a>
    # TODO: This can be config driven based on the scale that has to be achieved
    tbp_keys = redis_server.keys(pattern='tbp_*')
    for word in tbp_keys:
        # TODO: Most frequent words can be sent into the top N compute queue with higher priority
        # TODO: Log the call
        result = topnrelay.submit_job('topncompute',
                                      word,
                                      background=True,
                                      wait_until_complete=False)
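
background=True already makes these submissions fire-and-forget: a background job detaches from the client and its result is never sent back, so the wait_until_complete flag has no effect here.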
Example 15
#!/usr/bin/env python2.6
# coding=utf-8
from gearman import GearmanClient

gearman_client = GearmanClient(['127.0.0.1:4730'])

gearman_request = gearman_client.submit_job('echo', 'test gearman')

result_data = gearman_request.result
print result_data
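
python-gearman can also batch several payloads in one call; a minimal sketch reusing the client above, where each job is a dict with 'task' and 'data' keys:

requests = gearman_client.submit_multiple_jobs([
    {'task': 'echo', 'data': 'first'},
    {'task': 'echo', 'data': 'second'},
], background=False, wait_until_complete=True)
for req in requests:
    print req.result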
Example 16
def send_request(WORKERNAME, HOST, PORT, REQUEST):
    client = GearmanClient([HOST + ":" + str(PORT)])
    response = client.submit_job(WORKERNAME, packRequest(REQUEST))
    result = unpackResponse(response.result)
    return result
Example 17
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 03 16:17:58 2014

@author: jiran
"""

from gearman import GearmanClient
client = GearmanClient(['192.168.1.123:4730'])
request = client.submit_job('reverse','this string')
newresult = request.result
print newresult
Example 18
                                                                     '2014-11-7',
                                                                     '2014-11-4',
                                                                     '2014-11-5',
                                                                     '2014-11-8',
                                                                     '2014-12-1',
                                                                     '2014-8-5',
                                                                     '2014-8-4',
                                                                     '2014-8-7',
                                                                     '2014-8-6',
                                                                     '2014-8-1',
                                                                     '2014-8-2',
                                                                     '2014-8-9',
                                                                     '2014-8-8',
                                                                     '2014-10-17',
                                                                     '2014-10-16',
                                                                     '2014-10-15',
                                                                     '2014-10-14',
                                                                     '2014-10-13',
                                                                     '2014-10-11',
                                                                     '2014-10-10',
                                                                     '2014-10-18']}],
         'fill_type':{},
         'attr_weight':{},
         'needtransfer':[],
         'Parameters':{'n_clusters':i,'max_iter':300,'n_init':10,'init':'k-means++',
                       'precompute_distances':True,'tol':1e-4,'n_jobs':1}}
 a = json.dumps(attr)
 
 current_request = new_client.submit_job('data_cluster', a)
 new_result = current_request.result
 print new_result
Example 19
#request = client.submit_job('echo', "mama")
#request = client.submit_job('bravo', "mama")
#print request.result

#request = client.submit_job('reverse', "baba", unique="id1234", poll_timeout=5)
#print request.result
#request = client.submit_job('JenkinsJobStatus', "burnit", poll_timeout=5)
#client.shutdown()
#request = client.submit_job('org.gearman.example.JenkinsInvokeJob', "wet", poll_timeout=5)
#request = client.submit_job('org.gearman.example.JenkinsJobStatus', "burnit", poll_timeout=5)
#request = client.submit_job('JenkinsJobStatus', simplejson.dumps(jenkins_burnit_status), poll_timeout=5)
#request = client.submit_job('JenkinsInvokeJob', "build:wet:vs2005", poll_timeout=5)
#request = client.submit_job('build:dog:python-2.7', "", poll_timeout=5)
#request = client.submit_job('build:lemon:centos', "", poll_timeout=5)

# do a build job
build_id = uuid.UUID('{0ab10213-0405-0607-0809-0a0b0c0d0e0f}').hex
#build_id = uuid.uuid4().hex
print build_id

jenkins_build_params = {'uuid':build_id,'param2':"true",'param3':'bingo'}

request = client.submit_job('stop:localhost', simplejson.dumps(jenkins_build_params), unique=build_id, poll_timeout=5)

#request = client.submit_job('echo', simplejson.dumps(jenkins_build_params), poll_timeout=5, unique=build_id)

print request.result
print 'Work complete with state %s' % request.state
#request = client.submit_job('echo', 'foo')
#print request.result
#print 'Work complete with state %s' % request.state
Example 20
from gearman import GearmanClient
import simplejson
import uuid
import time

# create a client that will connect to the Gearman server running on
# localhost. The array allows you to specify a set of job servers.
#client = GearmanClient(['localhost:4730'])
client = GearmanClient(['15.185.117.66:4730'])

# Submit a synchronous job request to the job server and store the result.
print 'Sending job...'
build_id = uuid.uuid4().hex
print build_id
jenkins_build_params = {'uuid':build_id,'param2':"true",'param3':'bingo'}
request = client.submit_job('build:tangerine:gcc', simplejson.dumps(jenkins_build_params), poll_timeout=60, unique=build_id)

print request.result
print 'Work complete with state %s' % request.state
Example 21
def os_install_action(install_param):
    gearman_client = GearmanClient(['10.1.102.50:7070'])
    gearman_client.submit_job('rsync', json.dumps(install_param), background=True)
    return True
Example 22
def send_request(workername, host, request, packType="msgpack"):
    client = GearmanClient(host)
    response = client.submit_job(workername, packRequest(request, packType))
    result = unpackResponse(response.result)
    client.shutdown()
    return result
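
Unlike most examples here, this helper calls client.shutdown() to close the server connection once the result is back, so short-lived callers do not leak sockets.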
Example 23
    def set_new_file(video_id, new_id, filepath):
        ''' Returns a tuple with the first value T/F, and the second, if False,
        the relevant message. '''

        from helpers import getVideoInfo
        from PIL import Image
        from shutil import move

        up = {'id': new_id, 'filepath': filepath, 'pid': video_id}

        i = up['id']
        dupe = assets.find_one({'@graph.ma:locator': {'$elemMatch': {'@id': i}}})
        mp4 = (unicode(config.MEDIA_DIRECTORY) + i + '.mp4').encode('utf-8')
        webm = (unicode(config.MEDIA_DIRECTORY) + i + '.webm').encode('utf-8')

        if path.isfile(webm) or path.isfile(mp4) or dupe is not None:
            return (False, "That file (%s) already exists; try another unique ID." %i)

        filepath = unicode(config.INGEST_DIRECTORY + up['filepath'])
        new_file = unicode(config.MEDIA_DIRECTORY + up['id'] + ".mp4")

        md = getVideoInfo(filepath.encode('utf-8'))
        poster = config.POSTERS_DIRECTORY + "%s.jpg" % (up["id"])
        thumb = config.POSTERS_DIRECTORY + "%s_thumb.jpg" % (up["id"])
        move(filepath.encode('utf-8'), new_file.encode('utf-8'))
        assets.update({"_id":up["pid"]},{"$set":{
            "@graph.ma:frameRate":float(md["framerate"]),
            "@graph.ma:averageBitRate":int(float(md["bitrate"])),
            "@graph.ma:frameWidth":int(md["width"]),
            "@graph.ma:frameHeight":int(md["height"]),
            "@graph.ma:duration":int( round(float(md["duration"])) )/60,
            "@graph.ma:locator": [
                {
                    "@id": up["id"],
                    "ma:hasFormat": "video/mp4",
                    "ma:hasCompression": {"@id":"http://www.freebase.com/view/en/h_264_mpeg_4_avc","name": "avc.42E01E"}
                },
                {
                    "@id": up["id"],
                    "ma:hasFormat": "video/webm",
                    "ma:hasCompression": {"@id":"http://www.freebase.com/m/0c02yk5","name":"vp8.0"}
                }
            ]
        }})
        imgcmd = "avconv -i '%s' -q:v 1 -r 1 -t 00:00:01 -ss 00:00:30 -f image2 '%s'" % (new_file,poster)
        system(imgcmd.encode('utf-8'))
        chmod(poster,0775)
        im=Image.open(poster)
        im.thumbnail((160,90))
        im.save(thumb)
        chmod(thumb,0775)

        if not app.config.get('TESTING'):
            from gearman import GearmanClient
            client = GearmanClient(config.GEARMAN_SERVERS)
            client.submit_job("generate_webm", str(up["id"]))
        else:
            from ingest import generate_webm
            result = generate_webm(file_id=up['id'])
            if result == "ERROR":
                raise Exception("Could not convert media file.")

        return (True,)
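
This is the same ingest flow as Example 1, refactored into a helper that returns a (success, message) tuple and adds a duplicate check against existing .mp4/.webm files and locator ids before moving anything.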
Example 24
from gearman import GearmanClient

new_client = GearmanClient(['www.aispring.top:4730'])
current_request = new_client.submit_job('echo', 'foo')
new_result = current_request.result
print new_result
Example 25
#!/usr/bin/env python
#coding:utf8
# Author          : tuxpy
# Email           : [email protected]
# Last modified   : 2014-08-29 11:52:16
# Filename        : gearman_client.py
# Description     : 
from gearman import GearmanClient

new_client = GearmanClient(["192.168.8.116:1234"])
fd = open("亮剑.txt", 'r')
line = fd.readline()
request = new_client.submit_job('pinyin', line)
print dir(request)

Example 26
#!/usr/bin/env python
import string

from PIL import Image
from gearman import GearmanClient 

gearman_client = GearmanClient(['ftester.chinacloudapp.cn:4730']) 
# gearman_client = GearmanClient(['172.31.1.92:4730']) 

path = './sample_images/tubingen.jpg'
data = open(path, 'rb').read()

ljust = string.ljust('kandinsky_e2_crop512', 100, ' ')
data = ljust + data

gearman_request = gearman_client.submit_job('test', data) 
result_data = gearman_request.result 
with open('test.jpg', 'wb') as pf:
    pf.write(result_data)
    #pf.write(data[10:])
Example 27
from gearman import GearmanClient
import simplejson
import uuid
import time

# create a client that will connect to the Gearman server running on
# localhost. The array allows you to specify a set of job servers.
# client = GearmanClient(['localhost:4730'])
client = GearmanClient(["15.185.117.66:4730"])

# Submit a synchronous job request to the job server and store the result.
print "Sending job..."
build_id = uuid.uuid4().hex
# build_id = uuid.UUID('{0ab10213-0405-0607-0809-0a0b0c0d0e0f}').hex
print build_id
jenkins_build_params = {"uuid": build_id, "param2": "true", "param3": "bingo"}
# request = client.submit_job('build:pear:!ubuntu', simplejson.dumps(jenkins_build_params),wait_until_complete=False, poll_timeout=60, unique=build_id)
request = client.submit_job(
    "build:pear:!ubuntu", simplejson.dumps(jenkins_build_params), poll_timeout=60, unique=build_id
)

print request.result
print "Work complete with state %s" % request.state
Example 28
from gearman import GearmanClient
import simplejson
import uuid
import time

# create a client that will connect to the Gearman server running on
# localhost. The array allows you to specify a set of job servers.
#client = GearmanClient(['localhost:4730'])
client = GearmanClient(['15.185.117.66:4730'])

# Submit a synchronous job request to the job server and store the result.
print 'Sending job...'
build_id = uuid.uuid4().hex
#build_id = uuid.UUID('{0ab10215-0405-0607-0809-0a0b0c0d0e0f}').hex
print build_id
jenkins_build_params = {'uuid':build_id,'param2':"true",'param3':'bingo'}
request = client.submit_job('build:grapefruit:master', simplejson.dumps(jenkins_build_params), poll_timeout=60, unique=build_id)

print request.result
print 'Work complete with state %s' % request.state
Example 29
    Randomizer function can be changed from client code, for example in order
    to create pseudo-random sequence or something like this
    '''
    return str(delimiter).join([str(randomizer(*randomizer_args)) for x in range(elements)])


# Create object of gearman client for pushing to server node tasks
# Gearman node connection params is taken from general settings module
client = GearmanClient(settings.STUB_GEARMAN_NODES)

while True:
    if random.random() < settings.STUB_TASKS_PROBABILITY.get('reverse', 0.5):
        # Add task for random word reversing
        word = random_word(*settings.STUB_TASKS_ARGS.get('reverse', []))
        client.submit_job('reverse', word, background=True)
        # TODO: logging!
        print 'Add reverse task for <%s>' % word

    if random.random() < settings.STUB_TASKS_PROBABILITY.get('sum', 0.5):
        # Add task for calculating sum of 4 digits
        sum = random_sum(*settings.STUB_TASKS_ARGS.get('sum', []))
        client.submit_job('sum', sum, background=True)
        # TODO: logging!
        print 'Add sum calculation task for <%s>' % sum

    if random.random() < settings.STUB_TASKS_PROBABILITY.get('multiple', 0.5):
        # Add task for calculating multiple of 2 digits
        multiple = random_multiple(*settings.STUB_TASKS_ARGS.get('multiple', []))
        client.submit_job('multiple', multiple, background=True)
        # TODO: logging!
Example 30
def doWork_packageByPackageInfo(packageInfo):
    client = GearmanClient([GearmanConfig.gearmanConnection])
    data = json.dumps(packageInfo)
    request = client.submit_job(JobList.Job_package, data, wait_until_complete=False)
    pass
Example 31
from gearman import GearmanClient
import simplejson
import uuid
import time

# create a client that will connect to the Gearman server running on
# localhost. The array allows you to specify a set of job servers.
client = GearmanClient(['localhost:4730'])

# Submit a synchronous job request to the job server and store the result.
print 'Sending job...'
build_id = uuid.uuid4().hex
print build_id
jenkins_build_params = {'uuid':build_id,'param2':"true",'param3':'bingo'}
request = client.submit_job('build:durian:oneiric-668621', simplejson.dumps(jenkins_build_params), poll_timeout=60, unique=build_id)

print request.result
print 'Work complete with state %s' % request.state
Example 32
from gearman import GearmanClient
import simplejson
import uuid
import time

# create a client that will connect to the Gearman server running on
# localhost. The array allows you to specify a set of job servers.
#client = GearmanClient(['localhost:4730'])
client = GearmanClient(['15.185.117.66:4730'])

# Submit a synchronous job request to the job server and store the result.
print 'Sending job...'
build_id = uuid.uuid4().hex
print build_id
jenkins_build_params = {'uuid':build_id,'param2':"true",'param3':'bingo'}
request = client.submit_job('build:kiwi:centos', simplejson.dumps(jenkins_build_params), poll_timeout=60, unique=build_id)

print request.result
print 'Work complete with state %s' % request.state
Example 33
def pre_schedule(worker, job):
    """ Imports metadata from API """
    data = pickle.loads(job.data)
    #print data
    s = requests.Session()
    
    task_r = s.get("http://localhost:8001"+data["task"])
    print "doing:", data["task"]
    task_js = json.loads(task_r.text)
    
    function = task_js["async_function"]
    
    input_dataset = json.loads(s.get("http://localhost:8001"+data["input_dataset"]).text)
    output_dataset = json.loads(s.get("http://localhost:8001"+data["output_dataset"]).text)
    
    input_dataset_id = input_dataset["id"]
    output_dataset_id = output_dataset["id"]
    
    
    #print input_columns
    #print output_column
    #print function
    #print input_dataset
    #print output_dataset
    
    
    #schedule
    count = input_dataset["datapoint_count"]
    
    #TODO bucket list is "hardcoded" here. find some other way to do that
    bucket_list = ",".join(["'"+str(input_dataset_id)+"-"+str(c)+"'" for c in xrange(1+int(count/10000))])
    
    
    table = "tsstore"
    
    lowest_time = input_dataset["lowest_ts"]
    highest_time = input_dataset["highest_ts"]
    
    #partition time buckets
    date_format = "%Y-%m-%d %H:%M:%S.%f"
    lt = datetime.datetime.strptime(lowest_time, date_format)-datetime.timedelta(microseconds=1)
    ht = datetime.datetime.strptime(highest_time, date_format)
    diff = ht-lt
    
    #TODO change to some other value during production
    batch_size = 10
    
    try:
        interval = task_js["interval_in_seconds"]
        #is aggregation
        print "is aggregation"
        IS_CALCULATION = False
        num_tasks = int(diff.total_seconds()/interval)
        
        cols = []
        dims_with_cols = []
        for dim in input_dataset["dimensions"]:
            col_r = s.get("http://localhost:8001"+dim)
            jreq = json.loads(col_r.text)["ts_column"]
            cols.append(jreq)
            dims_with_cols.append(DimWithCol(dim, jreq))
        
        output_column = cols[0]
        input_columns = cols[1:]
        
        
    except KeyError:
        #is calculation
        print "is calculation"
        IS_CALCULATION = True
        cluster = Cluster(data["cassandra_nodes"])
        session = cluster.connect('ws')
        stmt = "select count(*) from tsstore where bucket in (%s) and dataset=%s;" % (bucket_list, input_dataset_id)
        row = session.execute(stmt)
        count = row[0]._asdict()["count"]
        
        session.shutdown()
        
        output_dimension = s.get("http://localhost:8001"+task_js["output_dimension"])
        output_column = json.loads(output_dimension.text)["ts_column"]
        
        input_dimensions = task_js["input_dimensions"]
        input_columns = []
        for ic in input_dimensions:
            ic_r = s.get("http://localhost:8001"+ic)
            input_columns.append(json.loads(ic_r.text)["ts_column"])
        interval = (diff.total_seconds()*batch_size)/count
        num_tasks = int(count/batch_size)
    
    #print interval
    #print count/batch_size
    
    #update task_count
    task_update_callback(data["task_id"], int(num_tasks))
    
    temp_ht = lt+datetime.timedelta(seconds=interval)+datetime.timedelta(microseconds=1)
    lt -= datetime.timedelta(microseconds=1)
    lowest_time = datetime.datetime.strftime(lt, date_format)
    highest_time = datetime.datetime.strftime(temp_ht, date_format)
        
    #while True:
    for XX in xrange(num_tasks+2):
        stmt = "select %s,dataset,bucket from %s where bucket in (%s) and time >= '%s' AND time <= '%s' and dataset=%s order by dataset, time;" % (",".join(input_columns+[output_column]+(["time"] if IS_CALCULATION else [])), table, bucket_list, lowest_time, highest_time, input_dataset_id)
        print stmt
        print num_tasks+1, XX
        #create job with previous stmt
        dat = {}
        dat["stmt"] = stmt
        dat["function"] = function
        dat["output_column"] = output_column
        dat["output_dataset"] = data["output_dataset"]
        dat["task_id"] = data["task_id"]
        dat["cassandra_nodes"] = data["cassandra_nodes"]
        client = GearmanClient(JOBSERVER_LIST)
        try:
            interval = task_js["interval_in_seconds"]
            #is aggregation
            #dat["input_columns"] = input_columns+[output_column]
            dat["input_dimensions"] = dims_with_cols
                
            client.submit_job("row_aggregator", pickle.dumps(dat),background=True)
        except KeyError:
            #is calculation
            dat["output_dimension"] = task_js["output_dimension"]
            client.submit_job("row_calculator", pickle.dumps(dat),background=True)
        
        #st = statsd.StatsClient("192.168.149.161",8125)
        #st.incr("task_created")

        #update timestamps
        lt += datetime.timedelta(seconds=interval)+datetime.timedelta(microseconds=1)
        temp_ht += datetime.timedelta(seconds=interval)+datetime.timedelta(microseconds=1)
        
        lowest_time = datetime.datetime.strftime(lt, date_format)
        highest_time = datetime.datetime.strftime(temp_ht, date_format)
        print lowest_time
        print highest_time
        
        #if lt > ht:
        #    break
    
    return "a"
Example 34
def os_install(server, install_param):
    gearman_client = GearmanClient(['%s:7070' % server])
    gearman_client.submit_job('install', json.dumps(install_param), background=True)
    return True
Example 35
# coding: utf-8
from gearman import GearmanClient
import json
  
new_client = GearmanClient(['192.168.5.41:4730'])
attr = dict(dist=20, center={'lng': 121.435101153, 'lat': 31.1952376167}, time_start=1385740800, time_end=1405995856)
a = json.dumps(attr)

current_request = new_client.submit_job('user_lbs', a)
new_result = current_request.result
print new_result
Example 36
#request = client.submit_job('echo', "mama")
#request = client.submit_job('bravo', "mama")
#print request.result

#request = client.submit_job('reverse', "baba", poll_timeout=5)
#request = client.submit_job('reverse', "baba")
#print request.result
#request = client.submit_job('JenkinsJobStatus', "burnit", poll_timeout=5)
#client.shutdown()
#request = client.submit_job('org.gearman.example.JenkinsInvokeJob', "wet", poll_timeout=5)
#request = client.submit_job('org.gearman.example.JenkinsJobStatus', "burnit", poll_timeout=5)
#request = client.submit_job('JenkinsJobStatus', simplejson.dumps(jenkins_burnit_status), poll_timeout=5)
#request = client.submit_job('JenkinsInvokeJob', "build:wet:vs2005", poll_timeout=5)
#request = client.submit_job('build:dog:python-2.7', "", poll_timeout=5)
#request = client.submit_job('build:lemon:centos', "", poll_timeout=5)

# do a build job
job_params = {'param1':"12321",'param2':"true",'param3':"validate"}
jenkins_data = {'uuid':'id2021', 'params':unicode(job_params)}
#request = client.submit_job('build:pep8', simplejson.dumps(jenkins_data))
request = client.submit_job('stop:localhost', simplejson.dumps(jenkins_data), poll_timeout=5)

# do a stop job
#request = client.submit_job('stop:jenkins_master.hp.com',"")


print request.result
print 'Work complete with state %s' % request.state
#request = client.submit_job('echo', 'foo')
#print request.result
#print 'Work complete with state %s' % request.state