class JenkinsAPI(object):
    """Thin wrapper around a python-jenkins ``Jenkins`` connection.

    The server URI is read from ``settings.JENKINS['URI']``.
    """

    def __init__(self):
        self.__server = Jenkins(settings.JENKINS.get('URI'))

    def get_all_jobs(self):
        """Return every job known to the Jenkins server."""
        return self.__server.get_all_jobs()

    def get_next_build_number(self, name):
        """Return the next build number for job *name*."""
        return self.__server.get_job_info(name)['nextBuildNumber']

    def build_job(self, name, parameters=None):
        """Trigger a build of *name* with optional build *parameters*."""
        return self.__server.build_job(name=name, parameters=parameters)

    def get_build_info(self, name, build_number):
        """Return build info for *name*/#*build_number*, or None on any error."""
        try:
            return self.__server.get_build_info(name, build_number)
        except Exception as e:
            logger.exception(e)
            return None

    def get_build_console_output(self, name, number):
        """Return the console log of *name*/#*number*, or None on Jenkins errors."""
        try:
            return self.__server.get_build_console_output(name, number)
        except JenkinsException as e:
            logger.exception(e)
            return None

    def download_package(self, package_url, name, build_number):
        """Stream a build artifact into the local packages directory.

        :param package_url: artifact path relative to the build.
        :param name: Jenkins job name that produced the artifact.
        :param build_number: build number that produced the artifact.
        :return: local filesystem path of the downloaded file.
        :raises requests.HTTPError: when the artifact URL returns an error status.
        """
        URI = settings.JENKINS.get('URI')
        download_url = '{}/job/{}/{}/artifact/{}'.format(
            URI, name, build_number, package_url)
        logger.debug(download_url)
        local_filename = download_url.split('/')[-1]
        code_path = os.path.join(settings.DEPLOY.get('CODE_PATH'), 'packages')
        # Fix: ensure the target directory exists before opening the file,
        # otherwise open() fails with FileNotFoundError on a fresh host.
        os.makedirs(code_path, exist_ok=True)
        local_full_filename = os.path.join(code_path, local_filename)
        with requests.get(download_url, stream=True) as r:
            r.raise_for_status()
            with open(local_full_filename, 'wb') as f:
                for chunk in r.iter_content(chunk_size=8192):
                    if chunk:  # filter out keep-alive new chunks
                        f.write(chunk)
        return local_full_filename

    def cancel_build(self, name, queue_id, build_number):
        """Best-effort cancel: drop the queue item, then stop the running build."""
        try:
            self.__server.cancel_queue(queue_id)
            self.__server.stop_build(name, build_number)
        except Exception as e:
            logger.error(e)

    def test(self):
        """Ad-hoc debugging helper.

        Fix: the original had two more return statements after this one,
        which were unreachable dead code and have been removed.
        """
        return self.__server.cancel_queue(3590)
def filter_jobs(jobs, cluster: Cluster, server: Jenkins, options, queue, already_rerun):
    """Decide which of *jobs* should be re-run and stage them for dispatch.

    Side effects only — nothing is returned.  A job that passes every filter
    gets ``job["parameters"]`` and ``job["reasons_for_rerun"]`` attached, is
    stored in *queue* keyed by job name, and its run URL is added to
    *already_rerun*.

    :param jobs: iterable of job dicts (reads keys ``name``, ``url``,
        ``build_id``, ``result``, ``failCount``, ``totalCount``, ``os``,
        ``component``, ``build``).
    :param cluster: query-able cluster used by the strategy filters.
    :param server: Jenkins connection used to inspect and stop builds.
    :param options: parsed options (reads ``jenkins_url``, ``s3_logs_url``,
        ``dispatcher_jobs``, ``stop``, ``noop``, ``strategy``,
        ``previous_builds``).
    :param queue: dict of staged jobs, keyed by job name.
    :param already_rerun: set of run URLs already processed (``.add`` is used).
    """
    logger.info("filtering {} jobs".format(len(jobs)))
    running_builds = get_running_builds(server)
    latest_builds = latest_jenkins_builds(options)
    for job in jobs:
        run_url = job["url"] + str(job["build_id"])
        # Skip anything already staged or already processed in this run.
        if job["name"] in queue or run_url in already_rerun:
            continue
        try:
            # Human-readable reasons accumulated for job["reasons_for_rerun"].
            reasons = []
            if job["result"] == "ABORTED":
                reasons.append("aborted")
            if job["result"] == "FAILURE":
                reasons.append("failure")
            if job["failCount"] == job["totalCount"]:
                reasons.append("no tests passed")
            if job["failCount"] > 0:
                reasons.append("failed tests")
            rerun_was_worse = rerun_worse(cluster, job, options)
            # if rerun was worse we skip these checks
            if not rerun_was_worse:
                if not passes_max_rerun_filter(cluster, job, options):
                    logger.debug(
                        "skipping {} (already rerun max times)".format(
                            job["name"]))
                    continue
            job_name = job_name_from_url(options.jenkins_url, job['url'])
            parameters = parameters_for_job(server, job_name, job['build_id'],
                                            job['build'], options.s3_logs_url)
            if "dispatcher_params" in parameters:
                # The first 11 characters are stripped before JSON-decoding —
                # presumably a fixed prefix on the stored value; confirm format.
                dispatcher_params = json.loads(
                    parameters['dispatcher_params'][11:])
                parameters["dispatcher_params"] = dispatcher_params
                # TODO: Remove when CBQE-6336 fixed
                if "component" not in dispatcher_params:
                    logger.debug(
                        "skipping {} (invalid dispatcher_params)".format(
                            job["name"]))
                    continue
            # only run dispatcher jobs
            if "dispatcher_params" not in parameters and options.dispatcher_jobs:
                logger.debug("skipping {} (non dispatcher job)".format(
                    job["name"]))
                continue
            is_newer, newer_builds = newer_build_in_jenkins(
                job_name, job, parameters, latest_builds, options)
            already_running = get_duplicate_jobs(running_builds, job_name,
                                                 parameters, options)
            if is_newer:
                # Only skip for a newer build if at least one of those newer
                # builds is NOT the duplicate we may be about to stop below.
                should_skip = False
                for build in newer_builds:
                    if build not in already_running:
                        should_skip = True
                        break
                if should_skip:
                    logger.debug("skipping {} (newer build in jenkins)".format(
                        job["name"]))
                    continue
            if len(already_running) > 0:
                if options.stop:
                    should_skip = False
                    for build in already_running:
                        # Builds with a "number" are running and can be
                        # aborted; entries without one are still queued.
                        if "number" in build:
                            logger.info("aborting {}/{}".format(
                                build['name'], build['number']))
                            if not options.noop:
                                server.stop_build(build['name'],
                                                  build['number'])
                        else:
                            should_skip = True
                    if should_skip:
                        # duplicate queued job, don't stop it
                        logger.debug("skipping {} (already queued)".format(
                            job["name"]))
                        continue
                else:
                    logger.debug(
                        "skipping {} (already running or waiting to be dispatched)"
                        .format(job["name"]))
                    continue
            if not passes_component_filter(job, parameters, options):
                logger.debug("skipping {} (component not included)".format(
                    job["name"]))
                continue
            if not passes_os_filter(job, parameters, options):
                logger.debug("skipping {} (os not included)".format(
                    job["name"]))
                continue
            if options.strategy:
                if options.strategy == "common":
                    query = "select raw count(*) from server where name = '{}' and `build` in {}".format(
                        job['name'], options.previous_builds)
                    query = filter_query(query, options)
                    common_count = list(cluster.query(query))[0]
                    # job wasn't common across all previous builds
                    if common_count != len(options.previous_builds):
                        logger.debug(
                            "skipping {} (not common across all previous builds)"
                            .format(job["name"]))
                        continue
                    else:
                        reasons.append("common")
                elif options.strategy == "regression":
                    # regression (if name in previous build and failCount or totalCount was different)
                    query = "select raw os.`{}`.`{}`.`{}` from greenboard where `build` = '{}' and type = 'server'".format(
                        job["os"], job["component"], job["name"],
                        options.previous_builds[0])
                    previous_job_runs = list(cluster.query(query))
                    # A single row means the nested path resolved to the list
                    # of runs; anything else is treated as "no previous runs".
                    if len(previous_job_runs) == 1:
                        previous_job_runs = previous_job_runs[0]
                    else:
                        previous_job_runs = []
                    if previous_job_runs is None:
                        previous_job_runs = []
                    # Pick the run from the current (non-older) build, if any.
                    previous_job = next(
                        filter(lambda run: run["olderBuild"] == False,
                               previous_job_runs), None)
                    # if no previous job then this is either a new job or
                    # that job wasn't run last time so don't filter
                    if previous_job is not None:
                        prev_fail_count = int(previous_job['failCount'])
                        prev_total_count = int(previous_job['totalCount'])
                        prev_result = previous_job["result"]
                        curr_fail_count = int(job['failCount'])
                        curr_total_count = int(job['totalCount'])
                        curr_result = job["result"]
                        # to reduce total time, don't rerun consistently aborted jobs even if the counts are different
                        if prev_result == "ABORTED" and curr_result == "ABORTED":
                            logger.debug(
                                "skipping {} (consistently aborted)".format(
                                    job["name"]))
                            continue
                        if prev_fail_count == curr_fail_count and prev_total_count == curr_total_count and prev_result == curr_result:
                            logger.debug("skipping {} (not regression)".format(
                                job["name"]))
                            continue
                        else:
                            reasons.append("regression")
                            # Record the previous run's stats on the job so
                            # downstream reporting can show the delta.
                            job["prev_total_count"] = prev_total_count
                            job["prev_fail_count"] = prev_fail_count
                            job["prev_pass_count"] = prev_total_count - prev_fail_count
                            job["prev_build_id"] = int(
                                previous_job["build_id"])
                            job["prev_result"] = prev_result
            if len(reasons) == 0:
                reasons.append("forced")
            job["parameters"] = parameters
            job["reasons_for_rerun"] = reasons
            queue[job["name"]] = job
            already_rerun.add(run_url)
        except Exception:
            # Best-effort per job: log the traceback and move on.
            traceback.print_exc()
            continue
class JenkinsNotifier(BotPlugin):
    """JenkinsBot is an Err plugin to manage Jenkins CI jobs from your
    chat platform like Slack."""

    # Map Jenkins "color" codes to human-readable statuses.
    status = {'blue': 'SUCCESS', 'blue_anime': 'IN PROGRESS',
              'red': 'FAILED', 'red_anime': 'IN PROGRESS',
              'disabled': 'DISABLED', 'aborted': 'ABORTED',
              'notbuilt': 'NOTBUILT', 'yellow': 'UNSTABLE'}
    failedjobsstring = " "

    def __init__(self, bot, name):
        self.jenkins = Jenkins(JENKINS_URL, JENKINS_USERNAME, JENKINS_TOKEN)
        super().__init__(bot, name)

    @botcmd(split_args_with=None)
    def jn_build(self, msg, args):
        """Build the job specified by jobName. You can add params!"""
        # Params are passed like "key1=value1 key2=value2" but parsing is
        # currently disabled, so the job is always built with empty params.
        params = {}
        # TODO: handle jobname with spaces in it; space is considered the
        # argument splitter, so only the first two tokens are joined here.
        jobName = ''.join([args[0], ' ', args[1]])
        try:
            self.jenkins.build_job(jobName, params)
        except NotFoundException:
            return ' '.join(["Sorry, I can't find the job. Typo maybe?",
                             " ARGS=", jobName])
        return ' '.join(["The job", args[0].strip(),
                         "has been sent to the queue to be built."])

    @botcmd
    def jn_cancel(self, msg, args):
        """Cancel a job in the queue by jobId."""
        try:
            self.jenkins.cancel_queue(args.strip())
        except NotFoundException:
            return "Sorry, I can't find the job. Maybe the ID does not exist."
        return "Job canceled from the queue."

    @botcmd
    def jn_list(self, msg, args):
        """List Jenkins jobs. You can filter with strings."""
        self.send(msg.to, "I'm getting the jobs list from Jenkins...")
        # Fix: search_term is already lower-cased; the second .lower() on it
        # inside the comprehension was redundant and has been dropped.
        search_term = args.strip().lower()
        jobs = [job for job in self.jenkins.get_jobs()
                if search_term in job['name'].lower()]
        return self.format_jobs(jobs)

    @botcmd
    def jn_status(self, msg, args):
        """List Jenkins jobs with their current status."""
        self.send(msg.to, "I'm getting the jobs with status Jenkins...")
        search_term = args.strip().lower()
        jobs = [job for job in self.jenkins.get_jobs()
                if search_term in job['fullname'].lower()]
        return self.format_job_status(jobs)

    @botcmd
    def jn_describe(self, msg, args):
        """Describe the job specified by jobName."""
        try:
            job = self.jenkins.get_job_info(args.strip())
        except NotFoundException:
            return "Sorry, I can't find the job. Typo maybe?"
        return ''.join([
            'Name: ', job['name'], '\n',
            'URL: ', job['url'], '\n',
            'Description: ',
            'None' if job['description'] is None else job['description'],
            '\n',
            'Next Build Number: ',
            str('None' if job['nextBuildNumber'] is None else
                job['nextBuildNumber']), '\n',
            'Last Successful Build Number: ',
            str('None' if job['lastBuild'] is None else
                job['lastBuild']['number']), '\n',
            'Last Successful Build URL: ',
            'None' if job['lastBuild'] is None else job['lastBuild']['url'],
            '\n'
        ])

    @botcmd
    def jn_running(self, msg, args):
        """List running jobs."""
        self.send(msg.to, "I will ask for the current running builds list!")
        jobs = self.jenkins.get_running_builds()
        return self.format_running_jobs(jobs)

    @botcmd(split_args_with=None)
    def jn_stop(self, msg, args):
        """Stop the building job specified by jobName and jobNumber."""
        try:
            int(args[1].strip())
        except ValueError:
            return "You have to specify the jobNumber: \"!jenkins stop <jobName> <jobNumber>"
        try:
            self.jenkins.stop_build(args[0].strip(), int(args[1].strip()))
        except NotFoundException:
            return "Sorry, I can't find the job. Typo maybe?"
        return ' '.join(["The job", args[0].strip(), "has been stopped."])

    @botcmd
    def jn_queue(self, msg, args):
        """List jobs in queue."""
        # Fix: this method was defined twice with identical bodies; the
        # duplicate definition has been removed.
        self.send(msg.to, "Getting the job queue...")
        jobs = self.jenkins.get_queue_info()
        return self.format_queue_jobs(jobs)

    @botcmd
    def jn_msgtimer(self, msg, args):
        """Sends messages at fix intervals."""
        yield "Starting timer"
        self.start_poller(5, self.my_callback)
        self.send(msg.to, "Boo! Bet you weren't expecting me, were you?")

    def my_callback(self):
        # Poller target registered by jn_msgtimer.
        self.log.info('I am called every 5sec')
        self.send(self.build_identifier("#errbottestchannel"),
                  "I am called every 5sec")

    @botcmd
    def jn_failed(self, msg, args):
        """List Jenkins jobs with failed status."""
        self.send(msg.to, "I'm getting the failed jobs ...")
        failedJobs = []
        search_term = args.strip().lower()
        jobs = [job for job in self.jenkins.get_jobs()
                if search_term in job['fullname'].lower()]
        for job in jobs:
            if self.status[job['color']] == 'FAILED':
                failedJobs.append(job)
        return self.format_job_status(failedJobs)

    # SUPPORT FUNCTIONS START HERE
    # Fix: format_jobs, format_queue_jobs and format_running_jobs were each
    # defined twice with identical bodies; the duplicates have been removed.

    def format_jobs(self, jobs):
        """Format jobs list"""
        if len(jobs) == 0:
            return "I haven't found any job."
        max_length = max([len(job['name']) for job in jobs])
        return '\n'.join(['%s (%s)' % (job['name'].ljust(max_length),
                                       job['url'])
                          for job in jobs]).strip()

    def format_queue_jobs(self, jobs):
        """Format queue jobs list"""
        if len(jobs) == 0:
            return "It seems that there is not jobs in queue."
        return '\n'.join(['%s - %s (%s)' % (str(job['id']),
                                            job['task']['name'],
                                            job['task']['url'])
                          for job in jobs]).strip()

    def format_running_jobs(self, jobs):
        """Format running jobs list"""
        if len(jobs) == 0:
            return "There is no running jobs!"
        return '\n'.join(['%s - %s (%s) - %s' % (str(job['number']),
                                                 job['name'], job['url'],
                                                 job['executor'])
                          for job in jobs]).strip()

    def format_job_status(self, jobs):
        """Format job status"""
        if len(jobs) == 0:
            return "there are no jobs to return"
        return '\n'.join(['%s (%s)' % (job['fullname'],
                                       self.status[job['color']])
                          for job in jobs]).strip()
class JenkinsBot(BotPlugin):
    """JenkinsBot is an Err plugin to manage Jenkins CI jobs from your
    chat platform like Slack."""

    def __init__(self, bot):
        self.jenkins = Jenkins(JENKINS_URL, JENKINS_USERNAME, JENKINS_PASSWORD)
        super().__init__(bot)

    @botcmd(split_args_with=None)
    def jenkins_build(self, msg, args):
        """Build the job specified by jobName. You can add params!"""
        # Params are passed like "key1=value1 key2=value2"
        params = {}
        try:
            for token in args[1:]:
                pieces = token.split('=', 1)
                params[pieces[0]] = pieces[1]
        except IndexError:
            return "I don't like that params! Try with this format: key1=value1 key2=value2..."
        job_name = args[0].strip()
        try:
            self.jenkins.build_job(job_name, params)
        except NotFoundException:
            return "Sorry, I can't find the job. Typo maybe?"
        return ' '.join([
            "The job", job_name,
            "has been sent to the queue to be built."
        ])

    @botcmd
    def jenkins_cancel(self, msg, args):
        """Cancel a job in the queue by jobId."""
        try:
            self.jenkins.cancel_queue(args.strip())
        except NotFoundException:
            return "Sorry, I can't find the job. Maybe the ID does not exist."
        return "Job canceled from the queue."

    @botcmd
    def jenkins_list(self, msg, args):
        """List Jenkins jobs. You can filter with strings."""
        self.send(msg.to, "I'm getting the jobs list from Jenkins...")
        search_term = args.strip().lower()
        matches = []
        for job in self.jenkins.get_jobs():
            if search_term.lower() in job['name'].lower():
                matches.append(job)
        return self.format_jobs(matches)

    @botcmd
    def jenkins_describe(self, msg, args):
        """Describe the job specified by jobName."""
        try:
            job = self.jenkins.get_job_info(args.strip())
        except NotFoundException:
            return "Sorry, I can't find the job. Typo maybe?"
        # Resolve every optional field first, then assemble the report.
        description = 'None' if job['description'] is None else job['description']
        next_number = str('None' if job['nextBuildNumber'] is None
                          else job['nextBuildNumber'])
        last = job['lastBuild']
        last_number = str('None' if last is None else last['number'])
        last_url = 'None' if last is None else last['url']
        return ''.join([
            'Name: ', job['name'], '\n',
            'URL: ', job['url'], '\n',
            'Description: ', description, '\n',
            'Next Build Number: ', next_number, '\n',
            'Last Successful Build Number: ', last_number, '\n',
            'Last Successful Build URL: ', last_url, '\n'
        ])

    @botcmd
    def jenkins_running(self, msg, args):
        """List running jobs."""
        self.send(msg.to, "I will ask for the current running builds list!")
        builds = self.jenkins.get_running_builds()
        return self.format_running_jobs(builds)

    @botcmd(split_args_with=None)
    def jenkins_stop(self, msg, args):
        """Stop the building job specified by jobName and jobNumber."""
        try:
            number = int(args[1].strip())
        except ValueError:
            return "You have to specify the jobNumber: \"!jenkins stop <jobName> <jobNumber>"
        try:
            self.jenkins.stop_build(args[0].strip(), number)
        except NotFoundException:
            return "Sorry, I can't find the job. Typo maybe?"
        return ' '.join(["The job", args[0].strip(), "has been stopped."])

    @botcmd
    def jenkins_queue(self, msg, args):
        """List jobs in queue."""
        self.send(msg.to, "Getting the job queue...")
        queued = self.jenkins.get_queue_info()
        return self.format_queue_jobs(queued)

    def format_jobs(self, jobs):
        """Format jobs list"""
        if not jobs:
            return "I haven't found any job."
        width = max(len(job['name']) for job in jobs)
        rows = ['%s (%s)' % (job['name'].ljust(width), job['url'])
                for job in jobs]
        return '\n'.join(rows).strip()

    def format_queue_jobs(self, jobs):
        """Format queue jobs list"""
        if not jobs:
            return "It seems that there is not jobs in queue."
        rows = []
        for job in jobs:
            task = job['task']
            rows.append('%s - %s (%s)' % (str(job['id']), task['name'],
                                          task['url']))
        return '\n'.join(rows).strip()

    def format_running_jobs(self, jobs):
        """Format running jobs list"""
        if not jobs:
            return "There is no running jobs!"
        rows = []
        for job in jobs:
            rows.append('%s - %s (%s) - %s' % (str(job['number']),
                                               job['name'], job['url'],
                                               job['executor']))
        return '\n'.join(rows).strip()
def filter_jobs(jobs, cluster: Cluster, server: Jenkins, options, already_rerun):
    """Return the subset of *jobs* that should be re-run.

    Each selected job gets its build parameters attached under
    ``job["parameters"]`` and its name appended to *already_rerun*.
    """
    running_builds = get_running_builds(server)
    selected = []
    for job in jobs:
        if job['name'] in already_rerun:
            continue
        try:
            # Drop jobs that have already hit the rerun limit.
            if not passes_max_rerun_filter(cluster, job, options):
                continue
            job_name = job_name_from_url(options.jenkins_url, job['url'])
            parameters = parameters_for_job(server, job_name,
                                            job['build_id'], job['build'],
                                            options.s3_logs_url)
            # only run dispatcher jobs
            if options.dispatcher_jobs and "dispatcher_params" not in parameters:
                continue
            duplicates = get_duplicate_jobs(running_builds, job_name,
                                            parameters)
            if len(duplicates) > 0:
                if not options.stop:
                    # A duplicate is active and we may not stop it: skip.
                    continue
                for duplicate in duplicates:
                    logger.info("aborting {}/{}".format(duplicate['name'],
                                                        duplicate['number']))
                    if not options.noop:
                        server.stop_build(duplicate['name'],
                                          duplicate['number'])
            if not passes_component_filter(job, parameters, options):
                continue
            if options.strategy:
                if options.strategy == "common":
                    query = "select raw count(*) from server where name = '{}' and `build` in {}".format(
                        job['name'], options.previous_builds)
                    query = filter_query(query, options)
                    common_count = list(cluster.query(query))[0]
                    # job wasn't common across all previous builds
                    if common_count != len(options.previous_builds):
                        continue
                elif options.strategy == "regression":
                    # regression (if name in previous build and failCount or totalCount was different)
                    query = "select failCount, totalCount from server where `build` = '{}' and name = '{}'".format(
                        options.previous_builds[0], job['name'])
                    previous_runs = list(cluster.query(query))
                    # if no previous job then this is either a new job or
                    # that job wasn't run last time so don't filter
                    if len(previous_runs) == 1:
                        prev = previous_runs[0]
                        counts_unchanged = (
                            int(prev['failCount']) == int(job['failCount'])
                            and int(prev['totalCount']) == int(job['totalCount']))
                        if counts_unchanged:
                            continue
            job["parameters"] = parameters
            selected.append(job)
            already_rerun.append(job["name"])
        except Exception:
            traceback.print_exc()
            continue
    return selected
class JenkinsTools(object):
    """Utility wrapper around a python-jenkins ``Jenkins`` connection."""

    def __init__(self, url, username, password):
        """Open the Jenkins connection.

        :param url: Jenkins base URL
        :param username: login user name
        :param password: login password
        """
        try:
            self.session = Jenkins(url=url,
                                   username=username,
                                   password=password)
        except Exception:
            # NOTE(review): on failure ``self.session`` stays unset, so any
            # later method call raises AttributeError — confirm intended.
            logger.exception('初始化Jenkins连接过程中发生异常,请检查!')

    @property
    def get_session(self):
        """Return the underlying Jenkins session."""
        return self.session

    def get_job_info(self, job_name):
        """Return info for job *job_name*.

        Exits the process with status 1 when the job does not exist;
        returns None when the lookup itself raises.
        """
        try:
            if self.session.job_exists(name=job_name):
                info = self.session.get_job_info(name=job_name)
                return info
            else:
                logger.warning(
                    '[WARNING]: Jenkins构建项目"{}"并不存在,请检查!'.format(job_name))
                sys.exit(1)
        except Exception:
            logger.exception('查看Jenkins构建项目"{}"过程中发生异常,请检查!'.format(job_name))

    def get_job_info_by_regex(self, pattern):
        """Return info for every job whose name matches the regex *pattern*."""
        try:
            info_regex = self.session.get_job_info_regex(pattern=pattern)
            return info_regex
        except Exception:
            logger.exception('通过正则表达式"{}"查看匹配构建项目过程中发生异常,请检查!'.format(pattern))

    def get_job_build_info(self, job_name, build_number):
        """Return build info for *job_name* / build *build_number*.

        Returns None (after logging) when the lookup raises.
        """
        try:
            result = self.session.get_build_info(name=job_name,
                                                 number=build_number)
            return result
        except Exception:
            logger.exception('通过构建项目名称"{}"和构建ID"{}"查看构建信息过程中发生异常,请检查!'.format(
                job_name, build_number))

    def rebase_build_info(self, job_name):
        """Return a one-element list summarizing *job_name*'s latest build.

        :param job_name: job to summarize
        :return: list with one summary dict, or None when lookup raises
        """
        response_list = []
        try:
            job_set = self.get_job_info(job_name)
            if job_set['lastBuild'] is not None:
                # Perf fix: the original fetched the same build info three
                # times (building / result / timestamp); fetch it once.
                build = self.get_job_build_info(job_set['displayName'],
                                                job_set['lastBuild']['number'])
                response_list.append(
                    dict(ActionJob=job_set['displayName'],
                         LastNumber=job_set['lastBuild']['number'],
                         Building=build['building'],
                         Result=build['result'],
                         Time=trans_timestamp(build['timestamp'])))
            else:
                response_list.append(
                    dict(ActionJob=job_set['displayName'],
                         LastBuild="No latest build job info now."))
            return response_list
        except Exception:
            logger.exception(
                '通过构建项目名称"{}"查看其自定义构建信息过程中发生异常,请检查!'.format(job_name))

    def rebase_build_info_by_regex(self, pattern):
        """Return build summaries for every job matching the regex *pattern*.

        :param pattern: job-name regular expression
        :return: list of summary dicts, "invalid pattern" on a bad regex,
            or None when another exception is logged
        """
        response_list = []
        try:
            job_set = self.get_job_info_by_regex(pattern)
            if len(job_set) != 0:
                for i in job_set:
                    if i['lastBuild'] is not None:
                        # Bug fix: the original called self.get_build_info,
                        # which does not exist on this class (the method is
                        # named get_job_build_info), so this branch always
                        # raised AttributeError into the except below.  Also
                        # fetch the build info once instead of four times.
                        build = self.get_job_build_info(
                            i['displayName'], i['lastBuild']['number'])
                        response_list.append(
                            dict(ActionJob=i['displayName'],
                                 LastNumber=i['lastBuild']['number'],
                                 Building=build['building'],
                                 Result=build['result'],
                                 Time=trans_timestamp(build['timestamp']),
                                 Operator=build['actions'][0]['causes'][0]
                                 ['shortDescription']))
                    else:
                        response_list.append(
                            dict(ActionJob=i['displayName'],
                                 LastBuild="No latest build job info now."))
            return response_list
        except re.error:
            return "invalid pattern"
        except Exception:
            logger.exception(
                '通过正则表达式"{}"查看匹配项目的自定义构建信息过程中发生异常,请检查!'.format(pattern))

    def stop_job_latest_build(self, job_name):
        """Stop the most recent build of *job_name* (best effort)."""
        try:
            lastNumber = self.rebase_build_info(
                job_name=job_name)[0]['LastNumber']
            self.session.stop_build(name=job_name, number=lastNumber)
        except Exception:
            logger.exception(
                '停止匹配正则表达式"{}"的构建项目的最近构建任务过程中发生异常,请检查!'.format(job_name))
class jenkinscls(object): def __init__(self): self.url = config.JENKINS_URL self.username = config.JENKINS_NAME self.token = config.JENKINS_TOKEN self.j = Jenkins(config.JENKINS_URL, username=config.JENKINS_NAME, password=config.JENKINS_TOKEN) def getjobnames(self, strval=''): rs = {r'...': r'/'} s = utils.multiple_replace(str(strval), rs).split('/') return s[0], "/".join(s[1:]) def getlenstr(self, strval, n): return str(strval)[0:n] def getstatus(self, strval): if str(strval) == 'FAILURE': return 'error' elif str(strval) == 'ABORTED': return 'aborted' elif str(strval) == 'SUCCESS': return 'success' else: return 'started' def edit_userjob_config(self, jn, obj): n, r = self.getjobnames(jn) try: desobj = callback() desobj.des = obj['description'] desobj.callback_url = "" desobj.build_id = '' desobj.duration = '' desobj.namespace = n desobj.image_name = obj['image_name'] desobj.repo_name = r desobj.status = '' desobj.tag = obj['build_config']['tag_configs']['docker_repo_tag'] desobj.time = '' ss = xmltodict.parse(self.getbasejob_config()) jsonpickle.set_preferred_backend('json') ss['project']['description'] = jsonpickle.encode(desobj) ss['project']['properties']['com.tikal.hudson.plugins.notification.HudsonNotificationProperty']['endpoints'] \ ['com.tikal.hudson.plugins.notification.Endpoint']['url'] = config.JOBHOOKURL ss['project']['scm']['userRemoteConfigs']['hudson.plugins.git.UserRemoteConfig'] \ ['url'] = obj['build_config']['code_repo_clone_url'] ss['project']['scm']['branches']['hudson.plugins.git.BranchSpec'] \ ['name'] = '*/' + obj['build_config']['tag_configs']['code_repo_type_value'] ss['project']['builders']['hudson.tasks.Shell'][ 'command'] = config.JOBCOMMON1 ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['registry'] \ ['url'] = config.REGISTRYURL b = str(obj['build_config']['tag_configs']['build_cache_enabled']) ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['noCache'] \ = ('true' if b == 'false' else 
'false') ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['dockerfilePath'] \ = obj['build_config']['tag_configs']['dockerfile_location'] ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['repoTag'] \ = obj['build_config']['tag_configs']['docker_repo_tag'] ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['repoName'] \ = obj['image_name'] return xmltodict.unparse(ss) except Exception as e: print e.message def edit_docker_load_job_config(self, obj): try: # {docker_login} && docker import {httpfilename} {imagename} && docker push {imagename} ss = xmltodict.parse(self.getdocker_load_config()) desobj = callback() desobj.des = obj['export_file_url'] desobj.callback_url = obj['post_callback_url'] desobj.build_id = obj['build_id'] desobj.duration = '' desobj.namespace = "" desobj.repo_name = "" desobj.image_name = obj['image_name'] desobj.status = '' desobj.tag = obj['tag'] desobj.time = '' jsonpickle.set_preferred_backend('json') ss['project']['description'] = jsonpickle.encode(desobj) ss['project']['properties']['com.tikal.hudson.plugins.notification.HudsonNotificationProperty']['endpoints'] \ ['com.tikal.hudson.plugins.notification.Endpoint']['url'] = config.JOBHOOKURL tempstr = str( ss['project']['builders']['hudson.tasks.Shell']['command']) s = { r'{docker_login}': config.JOBCOMMON1, r'{httpfilename}': obj['export_file_url'], r'{imagename}': config.REGISTRYNAME + '/' + obj['image_name'] + ':' + obj['tag'] } ss['project']['builders']['hudson.tasks.Shell'][ 'command'] = utils.multiple_replace(tempstr, s) return xmltodict.unparse(ss) except Exception as e: print e.message def edit_docker_sync_job_config(self, obj): try: # {docker_login} && docker pull {oldimage} && docker tag {oldimage} {newimage} && docker push {newimage} ss = xmltodict.parse(self.getdocker_sync_config()) jsonUtil = JsonUtil() c = jsonUtil.parseJsonString(config.CLOUD_CONFIG) cid = obj['sync_cloud_id'] desobj = callback() desobj.des = "" 
desobj.callback_url = obj['post_callback_url'] desobj.build_id = '' desobj.duration = '' desobj.namespace = "" desobj.repo_name = obj['sync_cloud_id'] # 把cloudid 临时存在 这 desobj.image_name = obj['image_name'] desobj.status = '' desobj.tag = obj['tag'] desobj.time = '' jsonpickle.set_preferred_backend('json') ss['project']['description'] = jsonpickle.encode(desobj) ss['project']['properties']['com.tikal.hudson.plugins.notification.HudsonNotificationProperty']['endpoints'] \ ['com.tikal.hudson.plugins.notification.Endpoint']['url'] = config.JOBHOOKURL+'?cloudid='+obj['sync_cloud_id'] tempstr = str( ss['project']['builders']['hudson.tasks.Shell']['command']) s = { r'{docker_login}': c[cid]['login_common'], r'{oldimage}': config.REGISTRYNAME + '/' + obj['image_name'] + ':' + obj['tag'], r'{newimage}': c[cid]['registry_name'] + '/' + obj['image_name'] + ':' + obj['tag'] } ss['project']['builders']['hudson.tasks.Shell'][ 'command'] = utils.multiple_replace(tempstr, s) return xmltodict.unparse(ss) except Exception as e: print e.message def updateconfig_buildid(self, jn, imagename, build_id, callback_url): try: ss = xmltodict.parse(self.j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') desobj = jsonpickle.decode(ss['project']['description']) if str(desobj.build_id) == str(build_id): return True desobj.build_id = build_id desobj.callback_url = callback_url desobj.image_name = imagename ss['project']['description'] = jsonpickle.encode(desobj) self.j.reconfig_job(jn, xmltodict.unparse(ss)) return True except Exception as e: print e.message return False @gen.coroutine def posthook(self, obj): # s = {r'/': r'...'} jn = obj['name'] bid = str(obj['build']['number']) # n, r = self.getjobnames(jn) re = hook() try: info = self.j.get_build_info(jn, int(bid)) if self.j.job_exists(jn): ss = xmltodict.parse(self.j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') if isinstance(jsonpickle.decode(ss['project']['description']), callback): desobj = 
jsonpickle.decode(ss['project']['description']) re.namespace = desobj.namespace re.repo_name = desobj.repo_name re.build_id = str(obj['build']['number']) re.status = self.getstatus(obj['build']['status']) re.duration = info['duration'] re.tag = desobj.tag re.time = datetime.now() re.callurl = desobj.callback_url except Exception as e: print e.message re = None raise gen.Return(re) @gen.coroutine def post_docker_load_hook(self, obj): jn = obj['name'] bid = str(obj['build']['number']) re = postimage() try: # info = self.j.get_build_info(jn, int(bid)) if self.j.job_exists(jn): ss = xmltodict.parse(self.j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') if isinstance(jsonpickle.decode(ss['project']['description']), callback): desobj = jsonpickle.decode(ss['project']['description']) re.image_name = desobj.image_name re.status = self.getstatus(obj['build']['status']) re.tag = desobj.tag re.export_file_url = desobj.des re.time = datetime.now() re.build_id = desobj.build_id re.post_callback_url = desobj.callback_url if re.status != 'error' and config.JENKINS_IMAGEOPTJOB_DELETE == 'true': self.j.delete_job(jn) except Exception as e: print e.message re = None raise gen.Return(re) @gen.coroutine def post_docker_sync_hook(self, obj, cloudid): jn = obj['name'] # bid = str(obj['build']['number']) jsonUtil = JsonUtil() c = jsonUtil.parseJsonString(config.CLOUD_CONFIG) j = Jenkins(c[cloudid]['jenkins_url'], username=c[cloudid]['jenkins_name'], password=c[cloudid]['jenkins_token']) re = postimagesync() try: if j.job_exists(jn): ss = xmltodict.parse(j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') if isinstance(jsonpickle.decode(ss['project']['description']), callback): desobj = jsonpickle.decode(ss['project']['description']) re.image_name = desobj.image_name re.status = self.getstatus(obj['build']['status']) re.sync_cloud_id = desobj.repo_name re.tag = desobj.tag re.time = datetime.now() re.post_callback_url = desobj.callback_url if re.status != 'error' 
and config.JENKINS_IMAGEOPTJOB_DELETE == 'true': j.delete_job(jn) except Exception as e: print e.message re = None raise gen.Return(re) @gen.coroutine def createjob(self, jobname, obj): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if self.j.job_exists(jn): re = createrespo(n, r, '工程已存在', 'error', datetime.now()) self.j.create_job(jn, self.edit_userjob_config(jn, obj)) re = createrespo(n, r, '', 'success', datetime.now()) except Exception as e: print e.message re = createrespo(n, r, '', 'error', datetime.now()) raise gen.Return(re) @gen.coroutine def create_docker_load_job(self, obj): # s = {r'/': r'...'} # jn = utils.multiple_replace(jobname, s) s = utils.randomstr(8) jn = '__docker_load_job_' + s re = postimage() re.created_at = datetime.now() re.image_name = obj['image_name'] re.build_id = str(obj['build_id']) re.post_callback_url = obj['post_callback_url'] re.tag = obj['tag'] re.status = 'started' try: if self.j.job_exists(jn): jn = jn + utils.randomstr(4) x = self.edit_docker_load_job_config(obj) self.j.create_job(jn, x) yield gen.sleep(0.5) self.j.build_job(jn) x = self.edit_docker_load_job_config(obj) self.j.create_job(jn, x) yield gen.sleep(0.5) self.j.build_job(jn) except Exception as e: print e.message re.status = 'error' raise gen.Return(re) @gen.coroutine def create_docker_sync_job(self, obj): # s = {r'/': r'...'} # jn = utils.multiple_replace(jobname, s) s = utils.randomstr(8) jn = '__docker_sync_job_' + s cid = obj['sync_cloud_id'] jsonUtil = JsonUtil() c = jsonUtil.parseJsonString(config.CLOUD_CONFIG) j = Jenkins(c[cid]['jenkins_url'], username=c[cid]['jenkins_name'], password=c[cid]['jenkins_token']) re = postimagesync() re.time = datetime.now() re.sync_cloud_id = obj['sync_cloud_id'] re.image_name = obj['image_name'] re.post_callback_url = obj['post_callback_url'] re.tag = obj['tag'] re.status = 'started' try: if j.job_exists(jn): jn = jn + utils.randomstr(4) j.create_job(jn, 
self.edit_docker_sync_job_config(obj)) yield gen.sleep(0.5) j.build_job(jn) j.create_job(jn, self.edit_docker_sync_job_config(obj)) yield gen.sleep(0.5) j.build_job(jn) except Exception as e: print e.message re.status = 'error' raise gen.Return(re) @gen.coroutine def editjob(self, jobname, obj): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if self.j.job_exists(jn): self.j.reconfig_job(jn, self.edit_userjob_config(jn, obj)) re = createrespo(n, r, '', 'success', datetime.now()) else: re = createrespo(n, r, 'repo is not find', 'error', datetime.now()) except Exception as e: print e.message re = createrespo(n, r, '', 'error', datetime.now()) raise gen.Return(re) @gen.coroutine def getjobinfo(self, jobname): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) re = jobinfo() try: if self.j.job_exists(jn): re.namespace = n re.repo_name = r re.info = self.j.get_job_info(jn) except Exception as e: print e.message re.namespace = n re.repo_name = r re.info = "" raise gen.Return(re) @gen.coroutine def deljob(self, jobname): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if self.j.job_exists(jn): self.j.delete_job(jn) re = delrespo(n, r, 'success') except Exception as e: print e.message re = delrespo(n, r, 'error') raise gen.Return(re) @gen.coroutine def stopbuild(self, jobname, build_id): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if self.j.job_exists(jn) and self.j.get_build_info( jn, int(build_id)): self.j.stop_build(jn, int(build_id)) re = delbuild(n, r, build_id, 'aborted') else: re = delbuild(n, r, build_id, 'error') except Exception as e: print e.message re = delbuild(n, r, build_id, 'error') raise gen.Return(re) @gen.coroutine def postbuild(self, jobname, imagename, tag, callback_url): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if 
self.j.job_exists(jn): j = self.j.get_job_info(jn) build_id = j['nextBuildNumber'] if self.j.get_queue_info() != []: re = postbuild(n, r, imagename, build_id, tag, datetime.now(), 'queue') elif j['queueItem'] != None: re = postbuild(n, r, imagename, build_id, tag, datetime.now(), 'queue') else: self.updateconfig_buildid(jn, imagename, build_id, callback_url) self.j.build_job(jn) re = postbuild(n, r, imagename, build_id, tag, datetime.now(), 'started') else: re = postbuild(n, r, '', '', datetime.now(), 'error') except Exception as e: print e.message re = postbuild(n, r, '', tag, datetime.now(), 'error') raise gen.Return(re) @gen.coroutine def getbuild(self, jobname, build_id): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: b = self.j.get_build_info(jn, int(build_id)) building = b['building'] duration = b['duration'] dt = self.getlenstr(b['timestamp'], 10) started_at = utils.timestamp_datetime(int(dt)) status = self.getstatus(b['result']) stdout = self.j.get_build_console_output(jn, int(build_id)) bd = build_detail(n, r, build_id, building, started_at, duration, status, stdout) except Exception as e: print e.message bd = build_detail(n, r, build_id, '', '', '', 'error', '') raise gen.Return(bd) def getdocker_sync_config(self): s = '''<?xml version='1.0' encoding='UTF-8'?> <project> <actions/> <description></description> <keepDependencies>false</keepDependencies> <properties> <com.tikal.hudson.plugins.notification.HudsonNotificationProperty plugin="[email protected]"> <endpoints> <com.tikal.hudson.plugins.notification.Endpoint> <protocol>HTTP</protocol> <format>JSON</format> <url>http://10.1.39.60:8080/v1/hook</url> <event>completed</event> <timeout>30000</timeout> <loglines>0</loglines> </com.tikal.hudson.plugins.notification.Endpoint> </endpoints> </com.tikal.hudson.plugins.notification.HudsonNotificationProperty> <com.synopsys.arc.jenkins.plugins.ownership.jobs.JobOwnerJobProperty plugin="[email protected]"/> 
<hudson.plugins.heavy__job.HeavyJobProperty plugin="[email protected]"> <weight>1</weight> </hudson.plugins.heavy__job.HeavyJobProperty> <jenkins.model.BuildDiscarderProperty> <strategy class="hudson.tasks.LogRotator"> <daysToKeep>30</daysToKeep> <numToKeep>50</numToKeep> <artifactDaysToKeep>-1</artifactDaysToKeep> <artifactNumToKeep>-1</artifactNumToKeep> </strategy> </jenkins.model.BuildDiscarderProperty> <job-metadata plugin="[email protected]"> <values class="linked-list"> <metadata-tree> <name>job-info</name> <parent class="job-metadata" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-tree> <name>last-saved</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-date> <name>time</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value> <time>1458098001639</time> <timezone>Asia/Shanghai</timezone> </value> <checked>false</checked> </metadata-date> <metadata-tree> <name>user</name> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-string> <name>display-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> <metadata-string> <name>full-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> </children> </metadata-tree> </children> </metadata-tree> </children> </metadata-tree> 
</values> </job-metadata> </properties> <scm class="hudson.scm.NullSCM"/> <scmCheckoutRetryCount>3</scmCheckoutRetryCount> <canRoam>true</canRoam> <disabled>false</disabled> <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding> <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding> <triggers/> <concurrentBuild>false</concurrentBuild> <builders> <hudson.tasks.Shell> <command>{docker_login} && docker pull {oldimage} && docker tag --force=true {oldimage} {newimage} && docker push {newimage} </command> </hudson.tasks.Shell> </builders> <publishers/> <buildWrappers> <hudson.plugins.ansicolor.AnsiColorBuildWrapper plugin="[email protected]"> <colorMapName>xterm</colorMapName> </hudson.plugins.ansicolor.AnsiColorBuildWrapper> </buildWrappers> </project>''' return s def getdocker_load_config(self): s = '''<?xml version='1.0' encoding='UTF-8'?> <project> <actions/> <description></description> <keepDependencies>false</keepDependencies> <properties> <com.tikal.hudson.plugins.notification.HudsonNotificationProperty plugin="[email protected]"> <endpoints> <com.tikal.hudson.plugins.notification.Endpoint> <protocol>HTTP</protocol> <format>JSON</format> <url>http://10.1.39.60:8080/v1/hook</url> <event>completed</event> <timeout>30000</timeout> <loglines>0</loglines> </com.tikal.hudson.plugins.notification.Endpoint> </endpoints> </com.tikal.hudson.plugins.notification.HudsonNotificationProperty> <com.synopsys.arc.jenkins.plugins.ownership.jobs.JobOwnerJobProperty plugin="[email protected]"/> <hudson.plugins.heavy__job.HeavyJobProperty plugin="[email protected]"> <weight>1</weight> </hudson.plugins.heavy__job.HeavyJobProperty> <jenkins.model.BuildDiscarderProperty> <strategy class="hudson.tasks.LogRotator"> <daysToKeep>30</daysToKeep> <numToKeep>50</numToKeep> <artifactDaysToKeep>-1</artifactDaysToKeep> <artifactNumToKeep>-1</artifactNumToKeep> </strategy> </jenkins.model.BuildDiscarderProperty> <job-metadata plugin="[email protected]"> <values 
class="linked-list"> <metadata-tree> <name>job-info</name> <parent class="job-metadata" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-tree> <name>last-saved</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-date> <name>time</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value> <time>1458097635464</time> <timezone>Asia/Shanghai</timezone> </value> <checked>false</checked> </metadata-date> <metadata-tree> <name>user</name> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-string> <name>display-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> <metadata-string> <name>full-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> </children> </metadata-tree> </children> </metadata-tree> </children> </metadata-tree> </values> </job-metadata> </properties> <scm class="hudson.scm.NullSCM"/> <scmCheckoutRetryCount>3</scmCheckoutRetryCount> <canRoam>true</canRoam> <disabled>false</disabled> <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding> <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding> <triggers/> <concurrentBuild>false</concurrentBuild> <builders> <hudson.tasks.Shell> <command>{docker_login} && docker import {httpfilename} 
{imagename} && docker push {imagename} </command> </hudson.tasks.Shell> </builders> <publishers/> <buildWrappers> <hudson.plugins.ansicolor.AnsiColorBuildWrapper plugin="[email protected]"> <colorMapName>xterm</colorMapName> </hudson.plugins.ansicolor.AnsiColorBuildWrapper> </buildWrappers> </project>''' return s def getbasejob_config(self): s = '''<?xml version='1.0' encoding='UTF-8'?> <project> <actions/> <description></description> <keepDependencies>false</keepDependencies> <properties> <com.tikal.hudson.plugins.notification.HudsonNotificationProperty plugin="[email protected]"> <endpoints> <com.tikal.hudson.plugins.notification.Endpoint> <protocol>HTTP</protocol> <format>JSON</format> <url>http://10.1.39.60:8080/v1/hook</url> <event>completed</event> <timeout>30000</timeout> <loglines>0</loglines> </com.tikal.hudson.plugins.notification.Endpoint> </endpoints> </com.tikal.hudson.plugins.notification.HudsonNotificationProperty> <com.synopsys.arc.jenkins.plugins.ownership.jobs.JobOwnerJobProperty plugin="[email protected]"/> <hudson.plugins.heavy__job.HeavyJobProperty plugin="[email protected]"> <weight>1</weight> </hudson.plugins.heavy__job.HeavyJobProperty> <jenkins.model.BuildDiscarderProperty> <strategy class="hudson.tasks.LogRotator"> <daysToKeep>30</daysToKeep> <numToKeep>50</numToKeep> <artifactDaysToKeep>-1</artifactDaysToKeep> <artifactNumToKeep>-1</artifactNumToKeep> </strategy> </jenkins.model.BuildDiscarderProperty> <job-metadata plugin="[email protected]"> <values class="linked-list"> <metadata-tree> <name>job-info</name> <parent class="job-metadata" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-tree> <name>last-saved</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-date> <name>time</name> 
<description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value> <time>1457958794480</time> <timezone>Asia/Shanghai</timezone> </value> <checked>false</checked> </metadata-date> <metadata-tree> <name>user</name> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-string> <name>display-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> <metadata-string> <name>full-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> </children> </metadata-tree> </children> </metadata-tree> </children> </metadata-tree> </values> </job-metadata> </properties> <scm class="hudson.plugins.git.GitSCM" plugin="[email protected]"> <configVersion>2</configVersion> <userRemoteConfigs> <hudson.plugins.git.UserRemoteConfig> <url>https://github.com/zhwenh/dockerfile-jdk-tomcat.git</url> </hudson.plugins.git.UserRemoteConfig> </userRemoteConfigs> <branches> <hudson.plugins.git.BranchSpec> <name>*/master</name> </hudson.plugins.git.BranchSpec> </branches> <doGenerateSubmoduleConfigurations>false</doGenerateSubmoduleConfigurations> <browser class="hudson.plugins.git.browser.GitLab"> <url></url> <version>7.11</version> </browser> <submoduleCfg class="list"/> <extensions/> </scm> <scmCheckoutRetryCount>3</scmCheckoutRetryCount> <canRoam>true</canRoam> <disabled>false</disabled> <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding> <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding> <triggers/> 
<concurrentBuild>false</concurrentBuild> <builders> <hudson.tasks.Shell> <command>docker login -u admin -p admin123 -e [email protected] registry.test.com</command> </hudson.tasks.Shell> <com.cloudbees.dockerpublish.DockerBuilder plugin="[email protected]"> <server plugin="[email protected]"> <uri>unix:///var/run/docker.sock</uri> </server> <registry plugin="[email protected]"> <url>http://registry.test.com/v2</url> </registry> <repoName>zhwenh/tomcat</repoName> <noCache>false</noCache> <forcePull>true</forcePull> <dockerfilePath>./Dockerfile</dockerfilePath> <skipBuild>false</skipBuild> <skipDecorate>false</skipDecorate> <repoTag>2.3.1</repoTag> <skipPush>false</skipPush> <createFingerprint>true</createFingerprint> <skipTagLatest>false</skipTagLatest> <buildAdditionalArgs></buildAdditionalArgs> <forceTag>true</forceTag> </com.cloudbees.dockerpublish.DockerBuilder> </builders> <publishers> <hudson.plugins.emailext.ExtendedEmailPublisher plugin="[email protected]"> <recipientList>$DEFAULT_RECIPIENTS</recipientList> <configuredTriggers> <hudson.plugins.emailext.plugins.trigger.FailureTrigger> <email> <recipientList>$DEFAULT_RECIPIENTS</recipientList> <subject>$PROJECT_DEFAULT_SUBJECT</subject> <body>$PROJECT_DEFAULT_CONTENT</body> <recipientProviders> <hudson.plugins.emailext.plugins.recipients.DevelopersRecipientProvider/> </recipientProviders> <attachmentsPattern></attachmentsPattern> <attachBuildLog>false</attachBuildLog> <compressBuildLog>false</compressBuildLog> <replyTo>$PROJECT_DEFAULT_REPLYTO</replyTo> <contentType>project</contentType> </email> </hudson.plugins.emailext.plugins.trigger.FailureTrigger> <hudson.plugins.emailext.plugins.trigger.SuccessTrigger> <email> <recipientList>$DEFAULT_RECIPIENTS</recipientList> <subject>$PROJECT_DEFAULT_SUBJECT</subject> <body>$PROJECT_DEFAULT_CONTENT</body> <recipientProviders> <hudson.plugins.emailext.plugins.recipients.DevelopersRecipientProvider/> </recipientProviders> <attachmentsPattern></attachmentsPattern> 
<attachBuildLog>false</attachBuildLog> <compressBuildLog>false</compressBuildLog> <replyTo>$PROJECT_DEFAULT_REPLYTO</replyTo> <contentType>project</contentType> </email> </hudson.plugins.emailext.plugins.trigger.SuccessTrigger> </configuredTriggers> <contentType>default</contentType> <defaultSubject>$DEFAULT_SUBJECT</defaultSubject> <defaultContent>$DEFAULT_CONTENT</defaultContent> <attachmentsPattern></attachmentsPattern> <presendScript>$DEFAULT_PRESEND_SCRIPT</presendScript> <attachBuildLog>false</attachBuildLog> <compressBuildLog>false</compressBuildLog> <replyTo></replyTo> <saveOutput>false</saveOutput> <disabled>false</disabled> </hudson.plugins.emailext.ExtendedEmailPublisher> </publishers> <buildWrappers> <hudson.plugins.ansicolor.AnsiColorBuildWrapper plugin="[email protected]"> <colorMapName>xterm</colorMapName> </hudson.plugins.ansicolor.AnsiColorBuildWrapper> </buildWrappers> </project>''' return s @gen.coroutine def createbasejob(self): s = self.getbasejob_config() try: self.j.create_job(config.JENKINS_BASEJOB, s) except Exception as e: print e.message raise gen.Return(False) raise gen.Return(True)