def launch(self, job, change, dependent_changes=[]):
    self.log.info("Launch job %s for change %s with dependent changes %s" %
                  (job, change, dependent_changes))
    dependent_changes = dependent_changes[:]
    dependent_changes.reverse()
    uuid = str(uuid1())
    params = dict(UUID=uuid,
                  GERRIT_PROJECT=change.project.name)
    if hasattr(change, 'refspec'):
        changes_str = '^'.join(
            ['%s:%s:%s' % (c.project.name, c.branch, c.refspec)
             for c in dependent_changes + [change]])
        params['GERRIT_BRANCH'] = change.branch
        params['GERRIT_CHANGES'] = changes_str
    if hasattr(change, 'ref'):
        params['GERRIT_REFNAME'] = change.ref
        params['GERRIT_OLDREV'] = change.oldrev
        params['GERRIT_NEWREV'] = change.newrev
    if callable(job.parameter_function):
        job.parameter_function(change, params)
        self.log.debug("Custom parameter function used for job %s, "
                       "change: %s, params: %s" % (job, change, params))

    build = Build(job, uuid)
    # We can get the started notification on another thread before
    # this is done so we add the build even before we trigger the
    # job on Jenkins.
    self.builds[uuid] = build

    # Sometimes Jenkins may erroneously respond with a 404.  Handle
    # that by retrying for 30 seconds.
    launched = False
    errored = False
    for count in range(6):
        try:
            self.jenkins.build_job(job.name, parameters=params)
            launched = True
            break
        except Exception:
            errored = True
            self.log.exception("Exception launching build %s for "
                               "job %s for change %s (will retry):" %
                               (build, job, change))
            time.sleep(5)

    if errored:
        if launched:
            self.log.error("Finally able to launch %s" % build)
        else:
            self.log.error("Unable to launch %s, even after retrying, "
                           "declaring lost" % build)
            # To keep the queue moving, declare this as a lost build
            # so that the change will get dropped.
            self.onBuildCompleted(build.uuid, 'LOST', None, None)
    return build

def execute(self, job, item, pipeline, dependent_changes=[],
            merger_items=[]):
    tenant = pipeline.tenant
    uuid = str(uuid4().hex)
    nodeset = item.current_build_set.getJobNodeSet(job.name)
    self.log.info(
        "Execute job %s (uuid: %s) on nodes %s for change %s "
        "with dependent changes %s" % (
            job, uuid, nodeset, item.change, dependent_changes))

    project = dict(
        name=item.change.project.name,
        short_name=item.change.project.name.split('/')[-1],
        canonical_hostname=item.change.project.canonical_hostname,
        canonical_name=item.change.project.canonical_name,
        src_dir=os.path.join('src', item.change.project.canonical_name),
    )

    zuul_params = dict(build=uuid,
                       buildset=item.current_build_set.uuid,
                       ref=item.change.ref,
                       pipeline=pipeline.name,
                       job=job.name,
                       voting=job.voting,
                       project=project,
                       tenant=tenant.name,
                       timeout=job.timeout,
                       jobtags=sorted(job.tags),
                       _inheritance_path=list(job.inheritance_path))
    if job.override_checkout:
        zuul_params['override_checkout'] = job.override_checkout
    if hasattr(item.change, 'branch'):
        zuul_params['branch'] = item.change.branch
    if hasattr(item.change, 'tag'):
        zuul_params['tag'] = item.change.tag
    if hasattr(item.change, 'number'):
        zuul_params['change'] = str(item.change.number)
    if hasattr(item.change, 'url'):
        zuul_params['change_url'] = item.change.url
    if hasattr(item.change, 'patchset'):
        zuul_params['patchset'] = str(item.change.patchset)
    if (hasattr(item.change, 'oldrev') and item.change.oldrev and
        item.change.oldrev != '0' * 40):
        zuul_params['oldrev'] = item.change.oldrev
    if (hasattr(item.change, 'newrev') and item.change.newrev and
        item.change.newrev != '0' * 40):
        zuul_params['newrev'] = item.change.newrev
    zuul_params['projects'] = {}  # Set below
    zuul_params['items'] = dependent_changes

    params = dict()
    params['job'] = job.name
    params['timeout'] = job.timeout
    params['post_timeout'] = job.post_timeout
    params['items'] = merger_items
    params['projects'] = []
    if hasattr(item.change, 'branch'):
        params['branch'] = item.change.branch
    else:
        params['branch'] = None
    params['override_branch'] = job.override_branch
    params['override_checkout'] = job.override_checkout
    params['repo_state'] = item.current_build_set.repo_state

    def make_playbook(playbook):
        d = playbook.toDict()
        for role in d['roles']:
            if role['type'] != 'zuul':
                continue
            project_metadata = item.layout.getProjectMetadata(
                role['project_canonical_name'])
            if project_metadata:
                role['project_default_branch'] = \
                    project_metadata.default_branch
            else:
                role['project_default_branch'] = 'master'
            role_trusted, role_project = item.layout.tenant.getProject(
                role['project_canonical_name'])
            role_connection = role_project.source.connection
            role['connection'] = role_connection.connection_name
            role['project'] = role_project.name
        return d

    if job.name != 'noop':
        params['playbooks'] = [make_playbook(x) for x in job.run]
        params['pre_playbooks'] = [make_playbook(x) for x in job.pre_run]
        params['post_playbooks'] = [make_playbook(x) for x in job.post_run]

    nodes = []
    for node in nodeset.getNodes():
        n = node.toDict()
        n.update(dict(name=node.name, label=node.label))
        nodes.append(n)
    params['nodes'] = nodes
    params['groups'] = [group.toDict() for group in nodeset.getGroups()]
    params['vars'] = job.variables
    params['host_vars'] = job.host_variables
    params['group_vars'] = job.group_variables
    params['zuul'] = zuul_params
    projects = set()
    required_projects = set()

    def make_project_dict(project, override_branch=None,
                          override_checkout=None):
        project_metadata = item.layout.getProjectMetadata(
            project.canonical_name)
        if project_metadata:
            project_default_branch = project_metadata.default_branch
        else:
            project_default_branch = 'master'
        connection = project.source.connection
        return dict(connection=connection.connection_name,
                    name=project.name,
                    canonical_name=project.canonical_name,
                    override_branch=override_branch,
                    override_checkout=override_checkout,
                    default_branch=project_default_branch)

    if job.required_projects:
        for job_project in job.required_projects.values():
            (trusted, project) = tenant.getProject(
                job_project.project_name)
            if project is None:
                raise Exception("Unknown project %s" %
                                (job_project.project_name,))
            params['projects'].append(
                make_project_dict(project,
                                  job_project.override_branch,
                                  job_project.override_checkout))
            projects.add(project)
            required_projects.add(project)

    for change in dependent_changes:
        # We have to find the project this way because it may not
        # be registered in the tenant (ie, a foreign project).
        source = self.sched.connections.getSourceByCanonicalHostname(
            change['project']['canonical_hostname'])
        project = source.getProject(change['project']['name'])
        if project not in projects:
            params['projects'].append(make_project_dict(project))
            projects.add(project)

    for p in projects:
        zuul_params['projects'][p.canonical_name] = (dict(
            name=p.name,
            short_name=p.name.split('/')[-1],
            # Duplicate this into the dict too, so that iterating
            # project.values() is easier for callers
            canonical_name=p.canonical_name,
            canonical_hostname=p.canonical_hostname,
            src_dir=os.path.join('src', p.canonical_name),
            required=(p in required_projects),
        ))

    build = Build(job, uuid)
    build.parameters = params
    build.nodeset = nodeset

    self.log.debug("Adding build %s of job %s to item %s" %
                   (build, job, item))
    item.addBuild(build)

    if job.name == 'noop':
        self.sched.onBuildStarted(build)
        self.sched.onBuildCompleted(build, 'SUCCESS', {})
        return build

    gearman_job = gear.TextJob('executor:execute', json_dumps(params),
                               unique=uuid)
    build.__gearman_job = gearman_job
    build.__gearman_worker = None
    self.builds[uuid] = build

    if pipeline.precedence == zuul.model.PRECEDENCE_NORMAL:
        precedence = gear.PRECEDENCE_NORMAL
    elif pipeline.precedence == zuul.model.PRECEDENCE_HIGH:
        precedence = gear.PRECEDENCE_HIGH
    elif pipeline.precedence == zuul.model.PRECEDENCE_LOW:
        precedence = gear.PRECEDENCE_LOW

    try:
        self.gearman.submitJob(gearman_job, precedence=precedence,
                               timeout=300)
    except Exception:
        self.log.exception("Unable to submit job to Gearman")
        self.onBuildCompleted(gearman_job, 'EXCEPTION')
        return build

    if not gearman_job.handle:
        self.log.error("No job handle was received for %s after"
                       " 300 seconds; marking as lost." %
                       gearman_job)
        self.onBuildCompleted(gearman_job, 'NO_HANDLE')

    self.log.debug("Received handle %s for %s" % (gearman_job.handle,
                                                  build))

    return build

def launch(self, job, item, pipeline, dependent_items=[]):
    uuid = str(uuid4().hex)
    self.log.info(
        "Launch job %s (uuid: %s) for change %s with dependent "
        "changes %s" % (
            job, uuid, item.change,
            [x.change for x in dependent_items]))
    dependent_items = dependent_items[:]
    dependent_items.reverse()
    params = dict(ZUUL_UUID=uuid,
                  ZUUL_PROJECT=item.change.project.name)
    params['ZUUL_PIPELINE'] = pipeline.name
    params['ZUUL_URL'] = item.current_build_set.zuul_url
    if hasattr(item.change, 'refspec'):
        changes_str = '^'.join(
            ['%s:%s:%s' % (i.change.project.name, i.change.branch,
                           i.change.refspec)
             for i in dependent_items + [item]])
        params['ZUUL_BRANCH'] = item.change.branch
        params['ZUUL_CHANGES'] = changes_str
        params['ZUUL_REF'] = ('refs/zuul/%s/%s' %
                              (item.change.branch,
                               item.current_build_set.ref))
        params['ZUUL_COMMIT'] = item.current_build_set.commit

        zuul_changes = ' '.join(['%s,%s' % (i.change.number,
                                            i.change.patchset)
                                 for i in dependent_items + [item]])
        params['ZUUL_CHANGE_IDS'] = zuul_changes
        params['ZUUL_CHANGE'] = str(item.change.number)
        params['ZUUL_PATCHSET'] = str(item.change.patchset)
    if hasattr(item.change, 'ref'):
        params['ZUUL_REFNAME'] = item.change.ref
        params['ZUUL_OLDREV'] = item.change.oldrev
        params['ZUUL_NEWREV'] = item.change.newrev

        params['ZUUL_REF'] = item.change.ref
        params['ZUUL_COMMIT'] = item.change.newrev

    # The destination_path is a unique path for this build request
    # and generally where the logs are expected to be placed
    destination_path = os.path.join(item.change.getBasePath(),
                                    pipeline.name, job.name, uuid[:7])
    params['BASE_LOG_PATH'] = item.change.getBasePath()
    params['LOG_PATH'] = destination_path

    # Allow the job to update the params
    self.updateBuildParams(job, item, params)

    # This is what we should be heading toward for parameters:

    # required:
    # ZUUL_UUID
    # ZUUL_REF (/refs/zuul/..., /refs/tags/foo, master)
    # ZUUL_COMMIT

    # optional:
    # ZUUL_PROJECT
    # ZUUL_PIPELINE

    # optional (changes only):
    # ZUUL_BRANCH
    # ZUUL_CHANGE
    # ZUUL_CHANGE_IDS
    # ZUUL_PATCHSET

    # optional (ref updated only):
    # ZUUL_OLDREV
    # ZUUL_NEWREV

    if 'ZUUL_NODE' in params:
        name = "build:%s:%s" % (job.name, params['ZUUL_NODE'])
    else:
        name = "build:%s" % job.name
    build = Build(job, uuid)
    build.parameters = params

    if job.name == 'noop':
        build.result = 'SUCCESS'
        self.sched.onBuildCompleted(build)
        return build

    gearman_job = gear.Job(name, json.dumps(params),
                           unique=uuid)
    build.__gearman_job = gearman_job
    self.builds[uuid] = build

    if not self.isJobRegistered(gearman_job.name):
        self.log.error("Job %s is not registered with Gearman" %
                       gearman_job)
        self.onBuildCompleted(gearman_job, 'NOT_REGISTERED')
        return build

    if pipeline.precedence == zuul.model.PRECEDENCE_NORMAL:
        precedence = gear.PRECEDENCE_NORMAL
    elif pipeline.precedence == zuul.model.PRECEDENCE_HIGH:
        precedence = gear.PRECEDENCE_HIGH
    elif pipeline.precedence == zuul.model.PRECEDENCE_LOW:
        precedence = gear.PRECEDENCE_LOW

    try:
        self.gearman.submitJob(gearman_job, precedence=precedence,
                               timeout=300)
    except Exception:
        self.log.exception("Unable to submit job to Gearman")
        self.onBuildCompleted(gearman_job, 'EXCEPTION')
        return build

    if not gearman_job.handle:
        self.log.error("No job handle was received for %s after"
                       " 300 seconds; marking as lost." %
                       gearman_job)
        self.onBuildCompleted(gearman_job, 'NO_HANDLE')

    self.log.debug("Received handle %s for %s" % (gearman_job.handle,
                                                  build))

    return build

def execute(self, job, item, pipeline, dependent_changes=[],
            merger_items=[]):
    log = get_annotated_logger(self.log, item.event)
    tenant = pipeline.tenant
    uuid = str(uuid4().hex)
    nodeset = item.current_build_set.getJobNodeSet(job.name)
    log.info(
        "Execute job %s (uuid: %s) on nodes %s for change %s "
        "with dependent changes %s",
        job, uuid, nodeset, item.change, dependent_changes)

    project = dict(
        name=item.change.project.name,
        short_name=item.change.project.name.split('/')[-1],
        canonical_hostname=item.change.project.canonical_hostname,
        canonical_name=item.change.project.canonical_name,
        src_dir=os.path.join('src', item.change.project.canonical_name),
    )

    zuul_params = dict(build=uuid,
                       buildset=item.current_build_set.uuid,
                       ref=item.change.ref,
                       pipeline=pipeline.name,
                       job=job.name,
                       voting=job.voting,
                       project=project,
                       tenant=tenant.name,
                       timeout=job.timeout,
                       event_id=item.event.zuul_event_id,
                       jobtags=sorted(job.tags),
                       _inheritance_path=list(job.inheritance_path))
    if job.artifact_data:
        zuul_params['artifacts'] = job.artifact_data
    if job.override_checkout:
        zuul_params['override_checkout'] = job.override_checkout
    if hasattr(item.change, 'branch'):
        zuul_params['branch'] = item.change.branch
    if hasattr(item.change, 'tag'):
        zuul_params['tag'] = item.change.tag
    if hasattr(item.change, 'number'):
        zuul_params['change'] = str(item.change.number)
    if hasattr(item.change, 'url'):
        zuul_params['change_url'] = item.change.url
    if hasattr(item.change, 'patchset'):
        zuul_params['patchset'] = str(item.change.patchset)
    if hasattr(item.change, 'message'):
        zuul_params['message'] = item.change.message
    if (hasattr(item.change, 'oldrev') and item.change.oldrev and
        item.change.oldrev != '0' * 40):
        zuul_params['oldrev'] = item.change.oldrev
    if (hasattr(item.change, 'newrev') and item.change.newrev and
        item.change.newrev != '0' * 40):
        zuul_params['newrev'] = item.change.newrev
    zuul_params['projects'] = {}  # Set below
    zuul_params['items'] = dependent_changes
    zuul_params['child_jobs'] = list(item.job_graph.getDirectDependentJobs(
        job.name))

    params = dict()
    params['job'] = job.name
    params['timeout'] = job.timeout
    params['post_timeout'] = job.post_timeout
    params['items'] = merger_items
    params['projects'] = []
    if hasattr(item.change, 'branch'):
        params['branch'] = item.change.branch
    else:
        params['branch'] = None
    params['override_branch'] = job.override_branch
    params['override_checkout'] = job.override_checkout
    params['repo_state'] = item.current_build_set.repo_state
    params['ansible_version'] = job.ansible_version

    def make_playbook(playbook):
        d = playbook.toDict()
        for role in d['roles']:
            if role['type'] != 'zuul':
                continue
            project_metadata = item.layout.getProjectMetadata(
                role['project_canonical_name'])
            if project_metadata:
                role['project_default_branch'] = \
                    project_metadata.default_branch
            else:
                role['project_default_branch'] = 'master'
            role_trusted, role_project = item.layout.tenant.getProject(
                role['project_canonical_name'])
            role_connection = role_project.source.connection
            role['connection'] = role_connection.connection_name
            role['project'] = role_project.name
        return d

    if job.name != 'noop':
        params['playbooks'] = [make_playbook(x) for x in job.run]
        params['pre_playbooks'] = [make_playbook(x) for x in job.pre_run]
        params['post_playbooks'] = [make_playbook(x) for x in job.post_run]
        params['cleanup_playbooks'] = [make_playbook(x)
                                       for x in job.cleanup_run]

    nodes = []
    for node in nodeset.getNodes():
        n = node.toDict()
        n.update(dict(name=node.name, label=node.label))
        nodes.append(n)
    params['nodes'] = nodes
    params['groups'] = [group.toDict() for group in nodeset.getGroups()]
    params['ssh_keys'] = []
    if pipeline.post_review:
        params['ssh_keys'].append(dict(
            name='%s project key' % item.change.project.canonical_name,
            key=item.change.project.private_ssh_key))
    params['vars'] = job.variables
    params['extra_vars'] = job.extra_variables
    params['host_vars'] = job.host_variables
    params['group_vars'] = job.group_variables
    params['zuul'] = zuul_params
    projects = set()
    required_projects = set()

    def make_project_dict(project, override_branch=None,
                          override_checkout=None):
        project_metadata = item.layout.getProjectMetadata(
            project.canonical_name)
        if project_metadata:
            project_default_branch = project_metadata.default_branch
        else:
            project_default_branch = 'master'
        connection = project.source.connection
        return dict(connection=connection.connection_name,
                    name=project.name,
                    canonical_name=project.canonical_name,
                    override_branch=override_branch,
                    override_checkout=override_checkout,
                    default_branch=project_default_branch)

    if job.required_projects:
        for job_project in job.required_projects.values():
            (trusted, project) = tenant.getProject(
                job_project.project_name)
            if project is None:
                raise Exception("Unknown project %s" %
                                (job_project.project_name,))
            params['projects'].append(
                make_project_dict(project,
                                  job_project.override_branch,
                                  job_project.override_checkout))
            projects.add(project)
            required_projects.add(project)

    for change in dependent_changes:
        # We have to find the project this way because it may not
        # be registered in the tenant (ie, a foreign project).
        source = self.sched.connections.getSourceByCanonicalHostname(
            change['project']['canonical_hostname'])
        project = source.getProject(change['project']['name'])
        if project not in projects:
            params['projects'].append(make_project_dict(project))
            projects.add(project)

    for p in projects:
        zuul_params['projects'][p.canonical_name] = (dict(
            name=p.name,
            short_name=p.name.split('/')[-1],
            # Duplicate this into the dict too, so that iterating
            # project.values() is easier for callers
            canonical_name=p.canonical_name,
            canonical_hostname=p.canonical_hostname,
            src_dir=os.path.join('src', p.canonical_name),
            required=(p in required_projects),
        ))
    params['zuul_event_id'] = item.event.zuul_event_id
    build = Build(job, uuid, zuul_event_id=item.event.zuul_event_id)
    build.parameters = params
    build.nodeset = nodeset

    log.debug("Adding build %s of job %s to item %s", build, job, item)
    item.addBuild(build)

    if job.name == 'noop':
        self.sched.onBuildStarted(build)
        self.sched.onBuildCompleted(build, 'SUCCESS', {}, [])
        return build

    # Update zuul attempts after addBuild above to ensure build_set
    # is up to date.
    attempts = build.build_set.getTries(job.name)
    zuul_params['attempts'] = attempts

    functions = getGearmanFunctions(self.gearman)
    function_name = 'executor:execute'
    # Because all nodes belong to the same provider, region and
    # availability zone we can get executor_zone from only the first
    # node.
    executor_zone = None
    if nodes and nodes[0].get('attributes'):
        executor_zone = nodes[0]['attributes'].get('executor-zone')

    if executor_zone:
        _fname = '%s:%s' % (
            function_name,
            executor_zone)
        if _fname in functions:
            function_name = _fname
        else:
            self.log.warning(
                "Job requested '%s' zuul-executor zone, but no "
                "zuul-executors found for this zone; ignoring zone "
                "request" % executor_zone)

    gearman_job = gear.TextJob(
        function_name, json_dumps(params), unique=uuid)

    build.__gearman_job = gearman_job
    build.__gearman_worker = None
    self.builds[uuid] = build

    if pipeline.precedence == zuul.model.PRECEDENCE_NORMAL:
        precedence = gear.PRECEDENCE_NORMAL
    elif pipeline.precedence == zuul.model.PRECEDENCE_HIGH:
        precedence = gear.PRECEDENCE_HIGH
    elif pipeline.precedence == zuul.model.PRECEDENCE_LOW:
        precedence = gear.PRECEDENCE_LOW

    try:
        self.gearman.submitJob(gearman_job, precedence=precedence,
                               timeout=300)
    except Exception:
        log.exception("Unable to submit job to Gearman")
        self.onBuildCompleted(gearman_job, 'EXCEPTION')
        return build

    if not gearman_job.handle:
        log.error("No job handle was received for %s after"
                  " 300 seconds; marking as lost.",
                  gearman_job)
        self.onBuildCompleted(gearman_job, 'NO_HANDLE')

    log.debug("Received handle %s for %s", gearman_job.handle, build)

    return build

def launch(self, job, item, pipeline, dependent_items=[]): self.log.info("Launch job %s for change %s with dependent changes %s" % (job, item.change, [x.change for x in dependent_items])) dependent_items = dependent_items[:] dependent_items.reverse() uuid = str(uuid4().hex) params = dict(ZUUL_UUID=uuid, ZUUL_PROJECT=item.change.project.name) params['ZUUL_PIPELINE'] = pipeline.name params['ZUUL_URL'] = item.current_build_set.zuul_url if hasattr(item.change, 'refspec'): changes_str = '^'.join([ '%s:%s:%s' % (i.change.project.name, i.change.branch, i.change.refspec) for i in dependent_items + [item] ]) params['ZUUL_BRANCH'] = item.change.branch params['ZUUL_CHANGES'] = changes_str params['ZUUL_REF'] = ( 'refs/zuul/%s/%s' % (item.change.branch, item.current_build_set.ref)) params['ZUUL_COMMIT'] = item.current_build_set.commit zuul_changes = ' '.join([ '%s,%s' % (i.change.number, i.change.patchset) for i in dependent_items + [item] ]) params['ZUUL_CHANGE_IDS'] = zuul_changes params['ZUUL_CHANGE'] = str(item.change.number) params['ZUUL_PATCHSET'] = str(item.change.patchset) if hasattr(item.change, 'ref'): params['ZUUL_REFNAME'] = item.change.ref params['ZUUL_OLDREV'] = item.change.oldrev params['ZUUL_NEWREV'] = item.change.newrev params['ZUUL_REF'] = item.change.ref params['ZUUL_COMMIT'] = item.change.newrev # The destination_path is a unqiue path for this build request # and generally where the logs are expected to be placed destination_path = os.path.join(item.change.getBasePath(), pipeline.name, job.name, uuid[:7]) params['BASE_LOG_PATH'] = item.change.getBasePath() params['LOG_PATH'] = destination_path # Allow the job to update the params self.updateBuildParams(job, item, params) # This is what we should be heading toward for parameters: # required: # ZUUL_UUID # ZUUL_REF (/refs/zuul/..., /refs/tags/foo, master) # ZUUL_COMMIT # optional: # ZUUL_PROJECT # ZUUL_PIPELINE # optional (changes only): # ZUUL_BRANCH # ZUUL_CHANGE # ZUUL_CHANGE_IDS # ZUUL_PATCHSET # optional (ref updated only): # ZUUL_OLDREV # ZUUL_NEWREV if 'ZUUL_NODE' in params: name = "build:%s:%s" % (job.name, params['ZUUL_NODE']) else: name = "build:%s" % job.name build = Build(job, uuid) build.parameters = params if job.name == 'noop': build.result = 'SUCCESS' self.sched.onBuildCompleted(build) return build gearman_job = gear.Job(name, json.dumps(params), unique=uuid) build.__gearman_job = gearman_job self.builds[uuid] = build if not self.isJobRegistered(gearman_job.name): self.log.error("Job %s is not registered with Gearman" % gearman_job) self.onBuildCompleted(gearman_job, 'NOT_REGISTERED') return build if pipeline.precedence == zuul.model.PRECEDENCE_NORMAL: precedence = gear.PRECEDENCE_NORMAL elif pipeline.precedence == zuul.model.PRECEDENCE_HIGH: precedence = gear.PRECEDENCE_HIGH elif pipeline.precedence == zuul.model.PRECEDENCE_LOW: precedence = gear.PRECEDENCE_LOW try: self.gearman.submitJob(gearman_job, precedence=precedence) except Exception: self.log.exception("Unable to submit job to Gearman") self.onBuildCompleted(gearman_job, 'EXCEPTION') return build if not gearman_job.handle: self.log.error("No job handle was received for %s after 30 seconds" " marking as lost." % gearman_job) self.onBuildCompleted(gearman_job, 'NO_HANDLE') self.log.debug("Received handle %s for %s" % (gearman_job.handle, build)) return build
def launch(self, job, item, pipeline, dependent_items=[]): self.log.info("Launch job %s for change %s with dependent changes %s" % (job, item.change, [x.change for x in dependent_items])) dependent_items = dependent_items[:] dependent_items.reverse() uuid = str(uuid4().hex) params = dict(ZUUL_UUID=uuid, ZUUL_PROJECT=item.change.project.name) params['ZUUL_PIPELINE'] = pipeline.name if hasattr(item.change, 'refspec'): changes_str = '^'.join([ '%s:%s:%s' % (i.change.project.name, i.change.branch, i.change.refspec) for i in dependent_items + [item] ]) params['ZUUL_BRANCH'] = item.change.branch params['ZUUL_CHANGES'] = changes_str params['ZUUL_REF'] = ( 'refs/zuul/%s/%s' % (item.change.branch, item.current_build_set.ref)) params['ZUUL_COMMIT'] = item.current_build_set.commit zuul_changes = ' '.join([ '%s,%s' % (i.change.number, i.change.patchset) for i in dependent_items + [item] ]) params['ZUUL_CHANGE_IDS'] = zuul_changes params['ZUUL_CHANGE'] = str(item.change.number) params['ZUUL_PATCHSET'] = str(item.change.patchset) if hasattr(item.change, 'ref'): params['ZUUL_REFNAME'] = item.change.ref params['ZUUL_OLDREV'] = item.change.oldrev params['ZUUL_NEWREV'] = item.change.newrev params['ZUUL_REF'] = item.change.ref params['ZUUL_COMMIT'] = item.change.newrev # This is what we should be heading toward for parameters: # required: # ZUUL_UUID # ZUUL_REF (/refs/zuul/..., /refs/tags/foo, master) # ZUUL_COMMIT # optional: # ZUUL_PROJECT # ZUUL_PIPELINE # optional (changes only): # ZUUL_BRANCH # ZUUL_CHANGE # ZUUL_CHANGE_IDS # ZUUL_PATCHSET # optional (ref updated only): # ZUUL_OLDREV # ZUUL_NEWREV if callable(job.parameter_function): pargs = inspect.getargspec(job.parameter_function) if len(pargs.args) == 2: job.parameter_function(item, params) else: job.parameter_function(item, job, params) self.log.debug("Custom parameter function used for job %s, " "change: %s, params: %s" % (job, item.change, params)) if 'ZUUL_NODE' in params: name = "build:%s:%s" % (job.name, params['ZUUL_NODE']) else: name = "build:%s" % job.name build = Build(job, uuid) build.parameters = params gearman_job = gear.Job(name, json.dumps(params), unique=uuid) build.__gearman_job = gearman_job self.builds[uuid] = build if not self.isJobRegistered(gearman_job.name): self.log.error("Job %s is not registered with Gearman" % gearman_job) self.onBuildCompleted(gearman_job, 'LOST') return build if pipeline.precedence == zuul.model.PRECEDENCE_NORMAL: precedence = gear.PRECEDENCE_NORMAL elif pipeline.precedence == zuul.model.PRECEDENCE_HIGH: precedence = gear.PRECEDENCE_HIGH elif pipeline.precedence == zuul.model.PRECEDENCE_LOW: precedence = gear.PRECEDENCE_LOW try: self.gearman.submitJob(gearman_job, precedence=precedence) except Exception: self.log.exception("Unable to submit job to Gearman") self.onBuildCompleted(gearman_job, 'LOST') return build if not gearman_job.handle: self.log.error("No job handle was received for %s after 30 seconds" " marking as lost." % gearman_job) self.onBuildCompleted(gearman_job, 'LOST') return build
def launch(self, job, item, pipeline, dependent_items=[]): self.log.info("Launch job %s for change %s with dependent changes %s" % (job, item.change, [x.change for x in dependent_items])) dependent_items = dependent_items[:] dependent_items.reverse() uuid = str(uuid4().hex) params = dict(ZUUL_UUID=uuid, ZUUL_PROJECT=item.change.project.name) params['ZUUL_PIPELINE'] = pipeline.name params['ZUUL_URL'] = self.zuul_server if hasattr(item.change, 'refspec'): changes_str = '^'.join( ['%s:%s:%s' % (i.change.project.name, i.change.branch, i.change.refspec) for i in dependent_items + [item]]) params['ZUUL_BRANCH'] = item.change.branch params['ZUUL_CHANGES'] = changes_str params['ZUUL_REF'] = ('refs/zuul/%s/%s' % (item.change.branch, item.current_build_set.ref)) params['ZUUL_COMMIT'] = item.current_build_set.commit zuul_changes = ' '.join(['%s,%s' % (i.change.number, i.change.patchset) for i in dependent_items + [item]]) params['ZUUL_CHANGE_IDS'] = zuul_changes params['ZUUL_CHANGE'] = str(item.change.number) params['ZUUL_PATCHSET'] = str(item.change.patchset) if hasattr(item.change, 'ref'): params['ZUUL_REFNAME'] = item.change.ref params['ZUUL_OLDREV'] = item.change.oldrev params['ZUUL_NEWREV'] = item.change.newrev params['ZUUL_REF'] = item.change.ref params['ZUUL_COMMIT'] = item.change.newrev # This is what we should be heading toward for parameters: # required: # ZUUL_UUID # ZUUL_REF (/refs/zuul/..., /refs/tags/foo, master) # ZUUL_COMMIT # optional: # ZUUL_PROJECT # ZUUL_PIPELINE # optional (changes only): # ZUUL_BRANCH # ZUUL_CHANGE # ZUUL_CHANGE_IDS # ZUUL_PATCHSET # optional (ref updated only): # ZUUL_OLDREV # ZUUL_NEWREV if callable(job.parameter_function): pargs = inspect.getargspec(job.parameter_function) if len(pargs.args) == 2: job.parameter_function(item, params) else: job.parameter_function(item, job, params) self.log.debug("Custom parameter function used for job %s, " "change: %s, params: %s" % (job, item.change, params)) if 'ZUUL_NODE' in params: name = "build:%s:%s" % (job.name, params['ZUUL_NODE']) else: name = "build:%s" % job.name build = Build(job, uuid) build.parameters = params gearman_job = gear.Job(name, json.dumps(params), unique=uuid) build.__gearman_job = gearman_job self.builds[uuid] = build if not self.isJobRegistered(gearman_job.name): self.log.error("Job %s is not registered with Gearman" % gearman_job) self.onBuildCompleted(gearman_job, 'LOST') return build if pipeline.precedence == zuul.model.PRECEDENCE_NORMAL: precedence = gear.PRECEDENCE_NORMAL elif pipeline.precedence == zuul.model.PRECEDENCE_HIGH: precedence = gear.PRECEDENCE_HIGH elif pipeline.precedence == zuul.model.PRECEDENCE_LOW: precedence = gear.PRECEDENCE_LOW try: self.gearman.submitJob(gearman_job, precedence=precedence) except Exception: self.log.exception("Unable to submit job to Gearman") self.onBuildCompleted(gearman_job, 'LOST') return build if not gearman_job.handle: self.log.error("No job handle was received for %s after 30 seconds" " marking as lost." % gearman_job) self.onBuildCompleted(gearman_job, 'LOST') return build
def execute(self, job, item, pipeline, dependent_changes=[], merger_items=[]): tenant = pipeline.tenant uuid = str(uuid4().hex) nodeset = item.current_build_set.getJobNodeSet(job.name) self.log.info( "Execute job %s (uuid: %s) on nodes %s for change %s " "with dependent changes %s" % ( job, uuid, nodeset, item.change, dependent_changes)) project = dict( name=item.change.project.name, short_name=item.change.project.name.split('/')[-1], canonical_hostname=item.change.project.canonical_hostname, canonical_name=item.change.project.canonical_name, src_dir=os.path.join('src', item.change.project.canonical_name), ) zuul_params = dict(build=uuid, buildset=item.current_build_set.uuid, ref=item.change.ref, pipeline=pipeline.name, job=job.name, voting=job.voting, project=project, tenant=tenant.name, timeout=job.timeout, jobtags=sorted(job.tags), _inheritance_path=list(job.inheritance_path)) if job.artifact_data: zuul_params['artifacts'] = job.artifact_data if job.override_checkout: zuul_params['override_checkout'] = job.override_checkout if hasattr(item.change, 'branch'): zuul_params['branch'] = item.change.branch if hasattr(item.change, 'tag'): zuul_params['tag'] = item.change.tag if hasattr(item.change, 'number'): zuul_params['change'] = str(item.change.number) if hasattr(item.change, 'url'): zuul_params['change_url'] = item.change.url if hasattr(item.change, 'patchset'): zuul_params['patchset'] = str(item.change.patchset) if hasattr(item.change, 'message'): zuul_params['message'] = item.change.message if (hasattr(item.change, 'oldrev') and item.change.oldrev and item.change.oldrev != '0' * 40): zuul_params['oldrev'] = item.change.oldrev if (hasattr(item.change, 'newrev') and item.change.newrev and item.change.newrev != '0' * 40): zuul_params['newrev'] = item.change.newrev zuul_params['projects'] = {} # Set below zuul_params['items'] = dependent_changes zuul_params['child_jobs'] = list(item.job_graph.getDirectDependentJobs( job.name)) params = dict() params['job'] = job.name params['timeout'] = job.timeout params['post_timeout'] = job.post_timeout params['items'] = merger_items params['projects'] = [] if hasattr(item.change, 'branch'): params['branch'] = item.change.branch else: params['branch'] = None params['override_branch'] = job.override_branch params['override_checkout'] = job.override_checkout params['repo_state'] = item.current_build_set.repo_state params['ansible_version'] = job.ansible_version def make_playbook(playbook): d = playbook.toDict() for role in d['roles']: if role['type'] != 'zuul': continue project_metadata = item.layout.getProjectMetadata( role['project_canonical_name']) if project_metadata: role['project_default_branch'] = \ project_metadata.default_branch else: role['project_default_branch'] = 'master' role_trusted, role_project = item.layout.tenant.getProject( role['project_canonical_name']) role_connection = role_project.source.connection role['connection'] = role_connection.connection_name role['project'] = role_project.name return d if job.name != 'noop': params['playbooks'] = [make_playbook(x) for x in job.run] params['pre_playbooks'] = [make_playbook(x) for x in job.pre_run] params['post_playbooks'] = [make_playbook(x) for x in job.post_run] nodes = [] for node in nodeset.getNodes(): n = node.toDict() n.update(dict(name=node.name, label=node.label)) nodes.append(n) params['nodes'] = nodes params['groups'] = [group.toDict() for group in nodeset.getGroups()] params['ssh_keys'] = [] if pipeline.post_review: params['ssh_keys'].append(dict( name='%s project key' % 
item.change.project.canonical_name, key=item.change.project.private_ssh_key)) params['vars'] = job.variables params['extra_vars'] = job.extra_variables params['host_vars'] = job.host_variables params['group_vars'] = job.group_variables params['zuul'] = zuul_params projects = set() required_projects = set() def make_project_dict(project, override_branch=None, override_checkout=None): project_metadata = item.layout.getProjectMetadata( project.canonical_name) if project_metadata: project_default_branch = project_metadata.default_branch else: project_default_branch = 'master' connection = project.source.connection return dict(connection=connection.connection_name, name=project.name, canonical_name=project.canonical_name, override_branch=override_branch, override_checkout=override_checkout, default_branch=project_default_branch) if job.required_projects: for job_project in job.required_projects.values(): (trusted, project) = tenant.getProject( job_project.project_name) if project is None: raise Exception("Unknown project %s" % (job_project.project_name,)) params['projects'].append( make_project_dict(project, job_project.override_branch, job_project.override_checkout)) projects.add(project) required_projects.add(project) for change in dependent_changes: # We have to find the project this way because it may not # be registered in the tenant (ie, a foreign project). source = self.sched.connections.getSourceByCanonicalHostname( change['project']['canonical_hostname']) project = source.getProject(change['project']['name']) if project not in projects: params['projects'].append(make_project_dict(project)) projects.add(project) for p in projects: zuul_params['projects'][p.canonical_name] = (dict( name=p.name, short_name=p.name.split('/')[-1], # Duplicate this into the dict too, so that iterating # project.values() is easier for callers canonical_name=p.canonical_name, canonical_hostname=p.canonical_hostname, src_dir=os.path.join('src', p.canonical_name), required=(p in required_projects), )) build = Build(job, uuid) build.parameters = params build.nodeset = nodeset self.log.debug("Adding build %s of job %s to item %s" % (build, job, item)) item.addBuild(build) if job.name == 'noop': self.sched.onBuildStarted(build) self.sched.onBuildCompleted(build, 'SUCCESS', {}, []) return build functions = getGearmanFunctions(self.gearman) function_name = 'executor:execute' # Because all nodes belong to the same provider, region and # availability zone we can get executor_zone from only the first # node. 
executor_zone = None if nodes and nodes[0].get('attributes'): executor_zone = nodes[0]['attributes'].get('executor-zone') if executor_zone: _fname = '%s:%s' % ( function_name, executor_zone) if _fname in functions: function_name = _fname else: self.log.warning( "Job requested '%s' zuul-executor zone, but no " "zuul-executors found for this zone; ignoring zone " "request" % executor_zone) gearman_job = gear.TextJob( function_name, json_dumps(params), unique=uuid) build.__gearman_job = gearman_job build.__gearman_worker = None self.builds[uuid] = build if pipeline.precedence == zuul.model.PRECEDENCE_NORMAL: precedence = gear.PRECEDENCE_NORMAL elif pipeline.precedence == zuul.model.PRECEDENCE_HIGH: precedence = gear.PRECEDENCE_HIGH elif pipeline.precedence == zuul.model.PRECEDENCE_LOW: precedence = gear.PRECEDENCE_LOW try: self.gearman.submitJob(gearman_job, precedence=precedence, timeout=300) except Exception: self.log.exception("Unable to submit job to Gearman") self.onBuildCompleted(gearman_job, 'EXCEPTION') return build if not gearman_job.handle: self.log.error("No job handle was received for %s after" " 300 seconds; marking as lost." % gearman_job) self.onBuildCompleted(gearman_job, 'NO_HANDLE') self.log.debug("Received handle %s for %s" % (gearman_job.handle, build)) return build