def _read_event(self):
     string = self.socket.recv().decode('utf-8')
     event = json.loads(string.split(None, 1)[1])
     logging.debug("Jenkins event received: " + json.dumps(event))
     for fileopts in self.files:
         output = {}
         source_url, out_event = self._parse_event(event, fileopts)
         job_filter = fileopts.get('job-filter')
         if (job_filter and
             not re.match(job_filter, out_event['fields']['build_name'])):
             continue
         build_queue_filter = fileopts.get('build-queue-filter')
         if (build_queue_filter and
             not re.match(build_queue_filter,
                          out_event['fields']['build_queue'])):
             continue
         output['source_url'] = source_url
         output['retry'] = fileopts.get('retry-get', False)
         output['event'] = out_event
         if 'subunit' in fileopts.get('name'):
             job = gear.Job(b'push-subunit',
                            json.dumps(output).encode('utf8'))
         else:
             job = gear.Job(b'push-log', json.dumps(output).encode('utf8'))
         try:
             self.gearman_client.submitJob(job)
         except Exception:
             logging.exception("Exception submitting job to Gearman.")
Example #2
def gearmanjobstop(unique_id):
    datalist_info = {}
    output = None
    try:
        client = gear.Client()
        client.addServer(gear_server, port=gear_port)
        client.waitForServer()  # Wait for at least one server to be connected
        param = {"id": unique_id}
        mysql = mysqlLib()
        datalist = mysql.query_task(param)
        mysql.close()
        jobname = datalist[0][3]
        status = datalist[0][5]
        build_number = datalist[0][6]

        if (int(status) == 2):
            build_params = {"name": jobname, "number": str(build_number)}
            job = gear.Job("stop:" + gear_server, json.dumps(build_params))
            client.submitJob(job)
            print("INFO, job abort started")
            output = "INFO, job abort started"
        else:
            print("INFO, job not running now")
            output = "INFO, job not running now"
    except Exception as e:
        print(e)
        output = "ERROR, " + str(e)
    finally:
        datalist_info['output'] = output
        return datalist_info
Example #3
    def add_job(self, name, params):
        """ adds job to the gearman queue"""
        self.log.info("starting test run")
        uuid = str(uuid4().hex)

        gearman_job = gear.Job(name, json.dumps(params), unique=uuid)

        if not self.is_job_registered(gearman_job.name):
            self.log.error("Job %s is not registered with Gearman" %
                           gearman_job)
            self.on_job_completed(gearman_job, 'NOT_REGISTERED')
            #return build

        try:
            self.gearman.submitJob(gearman_job)
        except GearmanError:
            self.log.exception("Unable to submit job to Gearman")
            self.on_build_completed(gearman_job, 'EXCEPTION')
            #return build

        if not gearman_job.handle:
            self.log.error("No job handle was received for %s after 30 seconds"
                           " marking as lost." % gearman_job)
            self.on_build_completed(gearman_job, 'NO_HANDLE')

        self.log.debug("Received handle %s for job" % gearman_job.handle)
Example #4
def addtask(username, jenkinsurl, jobname, specifynode, build_params):
    mysql = mysqlLib()
    url = None
    try:
        param = (username, jenkinsurl, jobname, json.dumps(build_params), 0)
        n, last_id = mysql.add_task(param)
        print(u"INFO, job initial, 0")
        unique_id = last_id
        if (specifynode == 1):
            J = Jenkins(jenkinsurl)
            jobs = Jobs(J)
            job = jobs[jobname]
            build_params['UNIQUE_ID'] = unique_id
            invoke = job.invoke(build_params=build_params)
            print("INFO, specifynode true")
        elif (specifynode == 0):
            client = gear.Client()
            client.addServer(gear_server, port=gear_port)
            client.waitForServer()  # Wait for at least one server to be connected
            build_params['UNIQUE_ID'] = unique_id
            job = gear.Job('build:' + jobname, json.dumps(build_params))
            client.submitJob(job)
            print("INFO, specifynode false")
        param = (1, unique_id)
        mysql.update_task_status(param)
        print(u"INFO, job submit, 1")
        print(u"INFO, job unique id :" + str(unique_id))
        url = "http://10.48.55.39:8889/jobstatus/?job=" + str(unique_id)
        print(u"INFO, you could link " + url + " to trace the job status")
    except Exception as e:
        print(e)
    finally:
        mysql.close()
        return url
Example #5
    def launch(user, template_id):
        try:
            pm_list = InternalAPI.get_user_permissions_on_resource(
                user, RS_TEM, template_id)

            if not pm_list[PM_LAUNCH_TEMPLATE]:
                return False, ARK_ERRMSG_CONTENT[1201]

            tem = JobTemplate.objects.get(id=template_id)

            # Create a job record in the database first, then send a task to gearman
            job = Job.objects.create(
                name=tem.name,
                description='play',
                status='pending',
                start_time=timezone.now(),
                #end_time=timezone.now(),
                extra_variables=tem.extra_variables,
                result='',
                user=user,
                job_template=tem,
            )
            client = gear.Client()
            client.addServer(settings.GEARMAN_SERVER, 4730)
            client.waitForServer()
            job.status = 'running'
            job.save()

            inv = tem.inventory
            inv_file = os.path.join(settings.INVENTORY_DIR,
                                    str(inv.id) + '_' + str(job.id) + '.yaml')

            with open(inv_file, 'w') as f:
                f.write(inv.gen_content())

            job_data = {
                'callback_url':
                settings.CALLBACK_HOST +
                reverse('job:remote_update', kwargs={'job_id': job.id}),
                'inventory_file':
                inv_file,
                'playbook_file':
                os.path.join(settings.PROJECT_DIR, str(tem.project.id),
                             tem.playbook),
                'args': {
                    'extra_variables': json.loads(tem.extra_variables),
                    'limit': tem.limit,
                    'forks': str(tem.forks),
                    'job_tags': tem.job_tags if tem.job_tags != '' else None,
                    'verbosity': '2',
                    "check": False,
                },
            }
            gearman_job = gear.Job('run_playbook',
                                   bytes(json.dumps(job_data), 'utf-8'))
            client.submitJob(gearman_job, background=True)

            return True, None
        except Exception as e:
            return False, str(e)
Example #6
    def main(self):
        gclient = gear.Client()
        gclient.addServer(self.args.server)
        gclient.waitForServer()  # Wait for at least one server to be connected

        if not isinstance(self.args.params, dict):
            build_params = simplejson.loads(self.args.params)
        else:
            build_params = self.args.params

        for x in range(0, int(self.args.jobs)):
            job_id = uuid.uuid4().hex
            build_params.update({'uuid': job_id})
            gjob = gear.Job(self.args.function,
                            simplejson.dumps(build_params),
                            unique=job_id)
            if self.args.wait:
                print("\n" + time.asctime(time.localtime(time.time())))
            print("Sending job: " + self.args.function + " to " +
                  self.args.server + " with params=" + str(build_params))
            gclient.submitJob(gjob)

            # wait for last job to complete before exiting
            if self.args.wait:
                finished = False
                while True:
                    if gjob.complete:
                        print(time.asctime(time.localtime(time.time())))
                        print("Job Result: " + str(gjob.data) + "\n")
                        finished = True
                    time.sleep(1)
                    if finished:
                        break
Example #7
def gearmanjob(gearman_srv_list, jobname, build_params):
    client = gear.Client()
    for item in gearman_srv_list:
        item_list = item.split(":")
        client.addServer(item_list[0], port=int(item_list[1]))
    client.waitForServer()  # Wait for at least one server to be connected
    job = gear.Job('build:' + jobname, json.dumps(build_params))
    client.submitJob(job)
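A hypothetical invocation of the helper above, sketching the expected "host:port" format of gearman_srv_list; the addresses, job name, and build parameters below are placeholders, not values from any of the projects quoted on this page.

gearman_srv_list = ['127.0.0.1:4730', '127.0.0.1:4731']  # placeholder host:port pairs
gearmanjob(gearman_srv_list, 'myproject-unit-tests', {'BRANCH': 'master'})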
Example #8
    def test_text_job_name(self):
        self.worker.registerFunction('test')

        for jobcount in range(2):
            job = gear.Job('test', b'testdata')
            self.client.submitJob(job)
            self.assertNotEqual(job.handle, None)

            workerjob = self.worker.getJob()
            self.assertEqual('test', workerjob.name)
Example #9
    def cancelRunningBuild(self, build):
        stop_uuid = str(uuid4().hex)
        data = dict(name=build.job.name, number=build.number)
        stop_job = gear.Job("stop:%s" % build.__gearman_manager,
                            json.dumps(data),
                            unique=stop_uuid)
        self.meta_jobs[stop_uuid] = stop_job
        self.log.debug("Submitting stop job: %s", stop_job)
        self.gearman.submitJob(stop_job, precedence=gear.PRECEDENCE_HIGH)
        return True
Example #10
    def submitJob(self,
                  name,
                  data,
                  build_set,
                  precedence=zuul.model.PRECEDENCE_NORMAL):
        uuid = str(uuid4().hex)
        self.log.debug("Submitting job %s with data %s" % (name, data))
        job = gear.Job(name, json.dumps(data), unique=uuid)
        self.build_sets[uuid] = build_set
        self.gearman.submitJob(job, precedence=precedence)
Example #11
    def test_handleStatusRes_2(self):
        client = gear.Client()

        packet = gear.Packet(gear.constants.RES, gear.constants.STATUS_RES,
                             b'H:127.0.0.1:11\x001\x000\x00\x00')
        packet.getJob = lambda: gear.Job("", "")
        job = client.handleStatusRes(packet)

        self.assertTrue(job.known)
        self.assertFalse(job.running)
Example #12
    def submit_job(self, name, data):
        if not self.job:
            self.job = gear.Job(name,
                                json.dumps(data),
                                unique=str(time.time()))
            self.gearman.submitJob(self.job)
        else:
            raise Exception('A job already exists in self.job')

        return self.job
Example #13
 def test_worker(self):
     tdir = self.useFixture(fixtures.TempDir())
     dsn = 'sqlite:///%s/test.db' % tdir.path
     w = worker.GearhornWorker(client_id='test_worker', dsn=dsn)
     w._store.initialize_schema()
     self.addCleanup(w.shutdown)
     w.addServer('localhost', self.server.port)
     w.registerSubscriberFunctions()
     w.registerFanoutFunction()
     subw = gear.Worker('test_worker_subw')
     subw.addServer('localhost', self.server.port)
     subw.waitForServer()
     subw.registerFunction('broadcasts_test_receiver')
     subscribe_message = {
         'client_id': 'test_receiver',
         'topic': 'broadcasts'
     }
     subscribe_job = gear.Job(
         w.subscribe_name,
         arguments=json.dumps(subscribe_message).encode('utf-8'))
     self.client.submitJob(subscribe_job)
     # w should have this message only
     w.work()
     while not subscribe_job.complete:
         time.sleep(0.1)
     fanout_message = {'topic': 'broadcasts', 'payload': 'in payload'}
     job = gear.Job(w.fanout_name,
                    json.dumps(fanout_message).encode('utf-8'))
     self.client.submitJob(job)
     # Thread in the background to wait for subw to complete job
     t = threading.Thread(target=w.work)
     t.start()
     broadcasted = subw.getJob()
     self.assertEqual('in payload', broadcasted.arguments)
     broadcasted.sendWorkComplete()
     # wait for complete to wind through tubes
     while not job.complete:
         time.sleep(0.1)
     self.assertFalse(job.failure)
     self.assertIsNone(job.exception)
     self.assertEqual([b'1'], job.data)
     t.join()
Example #14
def main():
    c = gear.Client()

    parser = argparse.ArgumentParser(description='Trigger a Zuul job.')
    parser.add_argument('--job', dest='job', required=True, help='Job Name')
    parser.add_argument('--project',
                        dest='project',
                        required=True,
                        help='Project name')
    parser.add_argument('--pipeline',
                        dest='pipeline',
                        default='release',
                        help='Zuul pipeline')
    parser.add_argument('--refname', dest='refname', help='Ref name')
    parser.add_argument('--oldrev',
                        dest='oldrev',
                        default='0000000000000000000000000000000000000000',
                        help='Old revision (SHA)')
    parser.add_argument('--newrev', dest='newrev', help='New revision (SHA)')
    parser.add_argument('--url',
                        dest='url',
                        default='http://zuul.openstack.org/p',
                        help='Zuul URL')
    parser.add_argument('--logpath',
                        dest='logpath',
                        required=True,
                        help='Path for log files.')
    args = parser.parse_args()

    data = {
        'ZUUL_PIPELINE': args.pipeline,
        'ZUUL_PROJECT': args.project,
        'ZUUL_UUID': str(uuid4().hex),
        'ZUUL_REF': args.refname,
        'ZUUL_REFNAME': args.refname,
        'ZUUL_OLDREV': args.oldrev,
        'ZUUL_NEWREV': args.newrev,
        'ZUUL_SHORT_OLDREV': args.oldrev[:7],
        'ZUUL_SHORT_NEWREV': args.newrev[:7],
        'ZUUL_COMMIT': args.newrev,
        'ZUUL_URL': args.url,
        'LOG_PATH': args.logpath,
    }

    c.addServer('127.0.0.1', 4730)
    c.waitForServer()

    job = gear.Job("build:%s" % args.job,
                   json.dumps(data),
                   unique=data['ZUUL_UUID'])
    c.submitJob(job, precedence=gear.PRECEDENCE_HIGH)

    while not job.complete:
        time.sleep(1)
Example #15
    def submitJob(self, name, data):
        self.log.debug("Submitting job %s with data %s" % (name, data))
        job = gear.Job(name, json.dumps(data), unique=str(time.time()))
        self.gearman.submitJob(job, timeout=300)

        self.log.debug("Waiting for job completion")
        while not job.complete:
            time.sleep(0.1)
        if job.exception:
            raise RPCFailure(job.exception)
        self.log.debug("Job complete, success: %s" % (not job.failure))
        return job
Example #16
    def sync(user, project_id, password):
        try:
            pm_list = InternalAPI.get_user_permissions_on_resource(
                user, RS_PRO, project_id)

            if not pm_list[PM_SYNC_PROJECT]:
                return False, ARK_ERRMSG_CONTENT[1201]

            pro = Project.objects.get(id=project_id)

            # Create a job record in the database first, then send a task to gearman
            job = Job.objects.create(
                name='update_repo',
                description='sync git',
                status='pending',
                start_time=timezone.now(),
                end_time=timezone.now(),
                result='',
                user=user,
                job_template=None,
            )
            client = gear.Client()
            client.addServer(settings.GEARMAN_SERVER, 4730)
            client.waitForServer()
            job.status = 'running'
            job.save()
            pro.last_sync_job = job
            pro.save()
            job_data = {
                'callback_url':
                settings.CALLBACK_HOST +
                reverse('job:remote_update', kwargs={'job_id': job.id}),
                'inventory_file':
                settings.GIT_SYNC_INVENTORY,
                'playbook_file':
                settings.GIT_SYNC_PLAYBOOK,
                'args': {
                    'extra_variables': {
                        'gituser': pro.username,
                        'gitpassword': password,
                        'giturl': pro.url.replace('https://', ''),
                        'gitbranch': pro.branch,
                    },
                },
            }
            gearman_job = gear.Job('run_playbook',
                                   bytes(json.dumps(job_data), 'utf-8'))
            client.submitJob(gearman_job, background=True)

            return True, None
        except Exception as e:
            return False, str(e)
Example #17
    def test_bg_job(self):
        self.worker.registerFunction('test')

        job = gear.Job(b'test', b'testdata')
        self.client.submitJob(job, background=True)
        self.assertNotEqual(job.handle, None)
        self.client.shutdown()
        del self.client

        workerjob = self.worker.getJob()
        self.assertEqual(workerjob.handle, job.handle)
        self.assertEqual(workerjob.arguments, b'testdata')
        workerjob.sendWorkData(b'workdata')
        workerjob.sendWorkComplete()
Example #18
 def fanout(self, job):
     try:
         message = json.loads(job.arguments)
         if not isinstance(message, dict):
             raise ValueError('must be a JSON mapping.')
         if 'topic' not in message or 'payload' not in message:
             raise ValueError('must have topic and payload keys')
     except ValueError as e:
         job.sendWorkException(bytes(str(e).encode('utf-8')))
         return
     wait_jobs = []
     errors = []
     for sub in self._store.get_subscribers(message['topic']):
         name = '%s_%s' % (message['topic'], sub)
         unique = util.to_utf8(message.get('unique'))
         cj = gear.Job(util.to_utf8(name),
                       arguments=util.to_utf8(message['payload']),
                       unique=unique)
         try:
             self.client.submitJob(cj,
                                   background=message.get(
                                       'background', False))
             if not message.get('background'):
                 wait_jobs.append((sub, cj))
         except gear.GearmanError as e:
             errors.append((sub, str(e)))
     done = 0
     # Timeout just in case
     before = time.time()
     while done < len(wait_jobs):
         for sub, wj in wait_jobs:
             if wj.complete and wj.failure:
                 if wj.exception:
                     errors.append((sub, wj.exception))
                 else:
                     errors.append((sub, 'Worker failure'))
             elif wj.complete:
                 done += 1
         time.sleep(0.1)
         if time.time() - before > self.foreground_timeout:
             # timed out
             for sub, wj in wait_jobs:
                 if not wj.complete:
                     errors.append((sub, 'Worker timeout'))
             break
     if errors:
         job.sendWorkException(json.dumps(errors))
     else:
         job.sendWorkComplete(('%d' % done).encode('utf-8'))
     return True
Example #19
 def _read_event(self):
     string = self.socket.recv().decode('utf-8')
     event = json.loads(string.split(None, 1)[1])
     logging.debug("Jenkins event received: " + json.dumps(event))
     for fileopts in self.files:
         output = {}
         source_url, out_event = self._parse_event(event, fileopts)
         output['source_url'] = source_url
         output['retry'] = fileopts.get('retry-get', False)
         output['event'] = out_event
         job = gear.Job(b'push-log', json.dumps(output).encode('utf8'))
         try:
             self.gearman_client.submitJob(job)
         except Exception:
             logging.exception("Exception submitting job to Gearman.")
Example #20
    def main(self):
        gclient = gear.Client()
        gclient.addServer(self.args.server)
        gclient.waitForServer()  # Wait for at least one server to be connected

        if not isinstance(self.args.params, dict):
            build_params = json.loads(self.args.params)
        else:
            build_params = self.args.params

        job_queue = deque()
        job = None
        num_jobs = int(self.args.iterations)
        offline_node = build_params.get("OFFLINE_NODE_WHEN_COMPLETE", "false")
        if num_jobs > 1 and offline_node.lower() == "true":
            print("WARN: Offline node requested multiple times, may need to "
                  "online nodes to complete this request")
        print("\n" + time.asctime(time.localtime(time.time())))
        for x in range(0, num_jobs):
            job_id = uuid.uuid4().hex
            build_params.update({'uuid':job_id})
            job = gear.Job(self.args.function,
                           json.dumps(build_params),
                           unique=job_id)

            print("Sending job: " + self.args.function + " to " +
                  self.args.server + " with params=" + str(build_params))
            gclient.submitJob(job)
            job_queue.append(job)

        # wait for jobs to complete before exiting
        print("\nWaiting for jobs to finish", end='')
        finished = False
        while True:
            sys.stdout.write('.')
            sys.stdout.flush()
            if job.complete:
                print("\n\n-----  Job Results (" +
                      time.asctime(time.localtime(time.time())) + ")  ------ \n")
                while len(job_queue) != 0:
                    cjob = job_queue.popleft()
                    if cjob.failure or cjob.exception:
                        # job request failed for some reason
                        print(cjob.unique + ' :  Failed')
                    else:
                        print(cjob.unique + ' : ' + str(cjob.data))
                finished = True

            time.sleep(1)
            if finished:
                break
Example #21
    def test_job(self):
        self.worker.registerFunction('test')

        for jobcount in range(2):
            job = gear.Job(b'test', b'testdata')
            self.client.submitJob(job)
            self.assertNotEqual(job.handle, None)

            workerjob = self.worker.getJob()
            self.assertEqual(workerjob.handle, job.handle)
            self.assertEqual(workerjob.arguments, b'testdata')
            workerjob.sendWorkData(b'workdata')
            workerjob.sendWorkComplete()

            for count in iterate_timeout(30, "job completion"):
                if job.complete:
                    break
            self.assertTrue(job.complete)
            self.assertEqual(job.data, [b'workdata'])
Example #22
    def setBuildDescription(self, build, desc):
        try:
            name = "set_description:%s" % build.__gearman_manager
        except AttributeError:
            # We haven't yet received the first data packet that tells
            # us where the job is running.
            return False

        if not self.isJobRegistered(name):
            return False

        desc_uuid = str(uuid4().hex)
        data = dict(name=build.job.name,
                    number=build.number,
                    html_description=desc)
        desc_job = gear.Job(name, json.dumps(data), unique=desc_uuid)
        self.meta_jobs[desc_uuid] = desc_job
        self.log.debug("Submitting describe job: %s", desc_job)
        self.gearman.submitJob(desc_job, precedence=gear.PRECEDENCE_LOW)
        return True
Example #23
def run(client: Any, job_name: str, args: Any = dict()) -> Any:
    job = gear.Job(job_name.encode('utf-8'), json.dumps(args).encode('utf-8'))
    client.submitJob(job, timeout=300)
    while not job.complete:
        time.sleep(0.1)
    return json.loads(job.data[0])
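A possible call site for the run() helper above, shown only as a sketch: the server address mirrors the 127.0.0.1:4730 used elsewhere on this page, and the 'echo' function name and its arguments are hypothetical stand-ins for whatever function a worker has registered. Note that run() expects the worker to reply with JSON, since it decodes job.data[0].

client = gear.Client()
client.addServer('127.0.0.1', 4730)
client.waitForServer()  # wait for at least one server connection
# Submit the hypothetical 'echo' function and block until a worker replies.
result = run(client, 'echo', {'text': 'hello'})
print(result)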
Example #24
    def _make_sub_job(self, name, topic):
        message = {'client_id': self.client_id, 'topic': topic}
        return gear.Job(name=name,
                        arguments=bytes(json.dumps(message).encode('utf-8')))
Example #25
    def launch(self, job, item, pipeline, dependent_items=[]):
        self.log.info("Launch job %s for change %s with dependent changes %s" %
                      (job, item.change, [x.change for x in dependent_items]))
        dependent_items = dependent_items[:]
        dependent_items.reverse()
        uuid = str(uuid4().hex)
        params = dict(ZUUL_UUID=uuid, ZUUL_PROJECT=item.change.project.name)
        params['ZUUL_PIPELINE'] = pipeline.name
        params['ZUUL_URL'] = item.current_build_set.zuul_url
        if hasattr(item.change, 'refspec'):
            changes_str = '^'.join([
                '%s:%s:%s' %
                (i.change.project.name, i.change.branch, i.change.refspec)
                for i in dependent_items + [item]
            ])
            params['ZUUL_BRANCH'] = item.change.branch
            params['ZUUL_CHANGES'] = changes_str
            params['ZUUL_REF'] = (
                'refs/zuul/%s/%s' %
                (item.change.branch, item.current_build_set.ref))
            params['ZUUL_COMMIT'] = item.current_build_set.commit

            zuul_changes = ' '.join([
                '%s,%s' % (i.change.number, i.change.patchset)
                for i in dependent_items + [item]
            ])
            params['ZUUL_CHANGE_IDS'] = zuul_changes
            params['ZUUL_CHANGE'] = str(item.change.number)
            params['ZUUL_PATCHSET'] = str(item.change.patchset)
        if hasattr(item.change, 'ref'):
            params['ZUUL_REFNAME'] = item.change.ref
            params['ZUUL_OLDREV'] = item.change.oldrev
            params['ZUUL_NEWREV'] = item.change.newrev

            params['ZUUL_REF'] = item.change.ref
            params['ZUUL_COMMIT'] = item.change.newrev

        # The destination_path is a unique path for this build request
        # and generally where the logs are expected to be placed
        destination_path = os.path.join(item.change.getBasePath(),
                                        pipeline.name, job.name, uuid[:7])
        params['BASE_LOG_PATH'] = item.change.getBasePath()
        params['LOG_PATH'] = destination_path

        # Allow the job to update the params
        self.updateBuildParams(job, item, params)

        # This is what we should be heading toward for parameters:

        # required:
        # ZUUL_UUID
        # ZUUL_REF (/refs/zuul/..., /refs/tags/foo, master)
        # ZUUL_COMMIT

        # optional:
        # ZUUL_PROJECT
        # ZUUL_PIPELINE

        # optional (changes only):
        # ZUUL_BRANCH
        # ZUUL_CHANGE
        # ZUUL_CHANGE_IDS
        # ZUUL_PATCHSET

        # optional (ref updated only):
        # ZUUL_OLDREV
        # ZUUL_NEWREV

        if 'ZUUL_NODE' in params:
            name = "build:%s:%s" % (job.name, params['ZUUL_NODE'])
        else:
            name = "build:%s" % job.name
        build = Build(job, uuid)
        build.parameters = params

        if job.name == 'noop':
            build.result = 'SUCCESS'
            self.sched.onBuildCompleted(build)
            return build

        gearman_job = gear.Job(name, json.dumps(params), unique=uuid)
        build.__gearman_job = gearman_job
        self.builds[uuid] = build

        if not self.isJobRegistered(gearman_job.name):
            self.log.error("Job %s is not registered with Gearman" %
                           gearman_job)
            self.onBuildCompleted(gearman_job, 'NOT_REGISTERED')
            return build

        if pipeline.precedence == zuul.model.PRECEDENCE_NORMAL:
            precedence = gear.PRECEDENCE_NORMAL
        elif pipeline.precedence == zuul.model.PRECEDENCE_HIGH:
            precedence = gear.PRECEDENCE_HIGH
        elif pipeline.precedence == zuul.model.PRECEDENCE_LOW:
            precedence = gear.PRECEDENCE_LOW

        try:
            self.gearman.submitJob(gearman_job, precedence=precedence)
        except Exception:
            self.log.exception("Unable to submit job to Gearman")
            self.onBuildCompleted(gearman_job, 'EXCEPTION')
            return build

        if not gearman_job.handle:
            self.log.error("No job handle was received for %s after 30 seconds"
                           " marking as lost." % gearman_job)
            self.onBuildCompleted(gearman_job, 'NO_HANDLE')

        self.log.debug("Received handle %s for %s" %
                       (gearman_job.handle, build))

        return build
Example #26
    def launch(self, job, item, pipeline, dependent_items=[]):
        self.log.info("Launch job %s for change %s with dependent changes %s" %
                      (job, item.change, [x.change for x in dependent_items]))
        dependent_items = dependent_items[:]
        dependent_items.reverse()
        uuid = str(uuid4().hex)
        params = dict(ZUUL_UUID=uuid, ZUUL_PROJECT=item.change.project.name)
        params['ZUUL_PIPELINE'] = pipeline.name
        if hasattr(item.change, 'refspec'):
            changes_str = '^'.join([
                '%s:%s:%s' %
                (i.change.project.name, i.change.branch, i.change.refspec)
                for i in dependent_items + [item]
            ])
            params['ZUUL_BRANCH'] = item.change.branch
            params['ZUUL_CHANGES'] = changes_str
            params['ZUUL_REF'] = (
                'refs/zuul/%s/%s' %
                (item.change.branch, item.current_build_set.ref))
            params['ZUUL_COMMIT'] = item.current_build_set.commit

            zuul_changes = ' '.join([
                '%s,%s' % (i.change.number, i.change.patchset)
                for i in dependent_items + [item]
            ])
            params['ZUUL_CHANGE_IDS'] = zuul_changes
            params['ZUUL_CHANGE'] = str(item.change.number)
            params['ZUUL_PATCHSET'] = str(item.change.patchset)
        if hasattr(item.change, 'ref'):
            params['ZUUL_REFNAME'] = item.change.ref
            params['ZUUL_OLDREV'] = item.change.oldrev
            params['ZUUL_NEWREV'] = item.change.newrev

            params['ZUUL_REF'] = item.change.ref
            params['ZUUL_COMMIT'] = item.change.newrev

        # This is what we should be heading toward for parameters:

        # required:
        # ZUUL_UUID
        # ZUUL_REF (/refs/zuul/..., /refs/tags/foo, master)
        # ZUUL_COMMIT

        # optional:
        # ZUUL_PROJECT
        # ZUUL_PIPELINE

        # optional (changes only):
        # ZUUL_BRANCH
        # ZUUL_CHANGE
        # ZUUL_CHANGE_IDS
        # ZUUL_PATCHSET

        # optional (ref updated only):
        # ZUUL_OLDREV
        # ZUUL_NEWREV

        if callable(job.parameter_function):
            pargs = inspect.getargspec(job.parameter_function)
            if len(pargs.args) == 2:
                job.parameter_function(item, params)
            else:
                job.parameter_function(item, job, params)
            self.log.debug("Custom parameter function used for job %s, "
                           "change: %s, params: %s" %
                           (job, item.change, params))

        if 'ZUUL_NODE' in params:
            name = "build:%s:%s" % (job.name, params['ZUUL_NODE'])
        else:
            name = "build:%s" % job.name
        build = Build(job, uuid)
        build.parameters = params

        gearman_job = gear.Job(name, json.dumps(params), unique=uuid)
        build.__gearman_job = gearman_job
        self.builds[uuid] = build

        if not self.isJobRegistered(gearman_job.name):
            self.log.error("Job %s is not registered with Gearman" %
                           gearman_job)
            self.onBuildCompleted(gearman_job, 'LOST')
            return build

        if pipeline.precedence == zuul.model.PRECEDENCE_NORMAL:
            precedence = gear.PRECEDENCE_NORMAL
        elif pipeline.precedence == zuul.model.PRECEDENCE_HIGH:
            precedence = gear.PRECEDENCE_HIGH
        elif pipeline.precedence == zuul.model.PRECEDENCE_LOW:
            precedence = gear.PRECEDENCE_LOW

        try:
            self.gearman.submitJob(gearman_job, precedence=precedence)
        except Exception:
            self.log.exception("Unable to submit job to Gearman")
            self.onBuildCompleted(gearman_job, 'LOST')
            return build

        if not gearman_job.handle:
            self.log.error("No job handle was received for %s after 30 seconds"
                           " marking as lost." % gearman_job)
            self.onBuildCompleted(gearman_job, 'LOST')

        return build
Example #27
import gear


def check_request_status(job):
    if job.complete:
        print("Job %s finished!  Result: %s" % (job.name, job.data))
    elif job.warning:
        print("Job %s warning! exception: %s" % (job.name, job.exception))
    elif job.failure:
        print("Job %s failed! exception: %s" % (job.name, job.exception))


client = gear.Client()
client.addServer('192.168.122.89')
client.waitForServer()
job = gear.Job("reverse", "test string")
client.submitJob(job)

check_request_status(job)
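The final snippet only shows the client half of the exchange. Below is a minimal worker-side sketch that could serve the "reverse" function it submits; it reuses only calls that already appear in the examples above (gear.Worker, addServer, waitForServer, registerFunction, getJob, sendWorkComplete), and the server address simply mirrors the client's. The 'reverse-worker' name is an arbitrary client id chosen for this sketch.

import gear

worker = gear.Worker('reverse-worker')  # arbitrary client id for this sketch
worker.addServer('192.168.122.89')
worker.waitForServer()
worker.registerFunction('reverse')

while True:
    job = worker.getJob()        # blocks until the server hands over a job
    text = job.arguments         # the payload passed to gear.Job(...)
    if isinstance(text, bytes):
        text = text.decode('utf-8')
    job.sendWorkComplete(text[::-1].encode('utf-8'))  # reply with the reversed string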