def get_job_name(adaptation_release):
    """Resolve the Jenkins job name for an adaptation release.

    Tries 'adaptations_trunk_<release>_ris' and 'adaptation_trunk_<release>_ris',
    then retries with known groupId prefixes stripped from the release name
    (e.g. 'com.nsn.foo' -> 'foo').

    Returns the first job name that exists on the server, or 0 if none does.
    """
    jenkins = Jenkins_Url(URL, USER, PASS)
    candidates = [
        'adaptations_trunk_' + adaptation_release + '_ris',
        'adaptation_trunk_' + adaptation_release + '_ris',
    ]
    # One loop replaces three copy-pasted prefix branches; 'in' replaces the
    # non-idiomatic __contains__ dunder call. All matching prefixes are tried,
    # in the same order as the original branches.
    for prefix in ('com.nsn.', 'com.nokia.', 'com.nokianetworks.'):
        if prefix in adaptation_release:
            candidates.append(
                'adaptations_trunk_' + adaptation_release.split(prefix)[-1] + '_ris')
    name = candidates[0]
    for name in candidates:
        if jenkins.job_exists(name):
            return name
    # Parenthesized print works identically under Python 2 and 3 for one arg.
    print("Jenkins doesn't exist: " + name)
    return 0
class TaskBuilder:
    """Creates and triggers a template-based Jenkins build job (Python 2).

    The job config is rendered from a Jinja-style template file; the job name
    is derived from the 'repos' parameter via BuildUtil.
    """

    def __init__(self, jenkinsURL):
        self.j = Jenkins(jenkinsURL)
        self.jobName = ""
        #with open("config.xml") as file:
        with open("./builds/config/job/config.xml") as file:
            self.templateConfig = file.read()
        self.template = Template(unicode(self.templateConfig))

    def set_new_config(self, **params):
        # The job description simply mirrors the repos parameter.
        self.newConfig = self.template.render(repos=params['repos'], description=params['repos'])

    def add_build(self, **params):
        """Create the job if it does not exist yet, then trigger a build."""
        self.set_job_name(**params)
        self.set_new_config(**params)
        if self.j.job_exists(self.jobName):
            self.do_build(**params)
        else:
            self.j.create_job(self.jobName, self.newConfig)
            self.do_build(**params)

    def do_build(self, **params):
        """Enable the job and start a parameterized build from **params."""
        self.set_job_name(**params)
        self.set_new_config(**params)
        self.j.enable_job(self.jobName)
        self.j.build_job(self.jobName, {'branch': params['branch'], 'version': params['version'], 'author': params['author'],
                                        'styleguide_repo': params['styleguide_repo'], 'styleguide_branch': params['styleguide_branch'], 'sidecar_repo': params['sidecar_repo'],
                                        'sidecar_branch': params['sidecar_branch'], 'package_list': params['package_list'], 'upgrade_package': params['upgrade_package']})

    def set_job_name(self, **params):
        # Job name is derived from the repos parameter by BuildUtil.
        buildUtil = BuildUtil()
        self.jobName = buildUtil.get_job_name(repos=params['repos'])

    def get_build_status(self, jobName):
        """Return a human-readable status for *jobName*, or False on bad input.

        Jenkins 'ball color' is mapped to a label; any unmapped color
        (e.g. a blinking color while building) is reported as 'Running'.
        """
        #job_info = self.j.get_job_info(self.jobName)
        #return build_status
        # NOTE(review): "Succcess" typo preserved — callers may compare against it.
        color_status = {"aborted": "Aborted", "red": "Failed", "blue": "Succcess"}
        if jobName == "":
            print "Have to specify job name"
            return False
        else:
            if self.j.job_exists(jobName):
                #Job exist in the job list
                job_info = self.j.get_job_info(jobName)
                if color_status.has_key(job_info['color']):
                    return color_status[job_info['color']]
                else:
                    return 'Running'
            else:
                print "Have to specify a validate job name"
                return False

    def get_job_name(self):
        # Name of the last job set up through set_job_name().
        return self.jobName
def get_job_name(adaptation_release):
    """Return the existing '*_trunk_<release>_ris' Jenkins job name, or 0."""
    server = Jenkins_Url(URL, USER, PASS)
    candidate = None
    # Try both historical naming schemes in order.
    for stem in ('adaptations_trunk_', 'adaptation_trunk_'):
        candidate = stem + adaptation_release + '_ris'
        if server.job_exists(candidate):
            return candidate
    # Neither scheme matched: report it and signal failure with 0.
    print("Jenkins doesn't exist: " + candidate)
    log("FAILED", " jenkins name dosen't exits " + candidate)
    return 0
def get_job_name(adaptation_release):
    """Look up the Jenkins job for *adaptation_release*; return its name or 0."""
    jenkins = Jenkins_Url(URL, USER, PASS)
    suffix = adaptation_release + '_ris'
    plural_name = 'adaptations_trunk_' + suffix
    if jenkins.job_exists(plural_name):
        return plural_name
    singular_name = 'adaptation_trunk_' + suffix
    if jenkins.job_exists(singular_name):
        return singular_name
    # No job under either naming scheme; report the last name tried.
    print("Jenkins doesn't exist: " + singular_name)
    log("FAILED", " jenkins name dosen't exits " + singular_name)
    return 0
def _upsert_jenkins_job(server: Jenkins, full_job_name: str, xml_job_config: str):
    """Create or reconfigure a Jenkins job so it matches *xml_job_config*.

    A missing job is created when the module-level `writeback` flag is set;
    an existing job is diffed against the canonicalized desired config and,
    after interactive confirmation, reconfigured.
    """
    if server.job_exists(full_job_name):
        job = server.get_job_info(full_job_name)
    else:
        # BUGFIX: previously written as 'doesn''t' — two adjacent string
        # literals that concatenate to "doesnt"; the apostrophe was lost.
        print(f"Jenkins job {full_job_name} doesn't exist.")
        job = None
    config_xml_string = canonicalize_jenkins_xml(xml_job_config)
    if job is None and writeback:
        print(f'Creating new job {full_job_name}')
        try:
            server.create_job(full_job_name, config_xml_string)
        except JenkinsException as e:
            print(f'Failed creating job:\n{e}')
    elif job is not None:
        # Compare canonical forms so formatting-only differences are ignored.
        current_xml_string = canonicalize_jenkins_xml(server.get_job_config(full_job_name))
        diffs = diff_files(BytesIO(bytearray(current_xml_string, encoding='utf-8')),
                           BytesIO(bytearray(config_xml_string, encoding='utf-8')))
        if len(diffs) > 0:
            print(f'Jenkins job {full_job_name} needs update')
            if writeback:
                print(json.dumps(diffs, indent=4))
                if input(f'Are you sure you want to update configuration for {full_job_name} (y/n) ? ') == 'y':
                    print(f'Updating job configuration for {full_job_name}')
                    try:
                        server.reconfig_job(full_job_name, config_xml_string)
                    except JenkinsException as e:
                        print(f'Failed updating job:\n{e}')
def create_docker_sync_job(self, obj):
    """Create a throwaway Jenkins job that syncs a docker image to another
    cloud, trigger it once, and return a 'started' postimagesync record.

    obj keys read: sync_cloud_id, image_name, post_callback_url, tag.
    On any failure the record's status is set to 'error'.
    """
    # s = {r'/': r'...'}
    # jn = utils.multiple_replace(jobname, s)
    # Random suffix keeps concurrent sync jobs from colliding.
    s = utils.randomstr(8)
    jn = '__docker_sync_job_' + s
    cid = obj['sync_cloud_id']
    jsonUtil = JsonUtil()
    c = jsonUtil.parseJsonString(config.CLOUD_CONFIG)
    j = Jenkins(c[cid]['jenkins_url'], username=c[cid]['jenkins_name'],
                password=c[cid]['jenkins_token'])
    re = postimagesync()
    re.time = datetime.now()
    re.sync_cloud_id = obj['sync_cloud_id']
    re.image_name = obj['image_name']
    re.post_callback_url = obj['post_callback_url']
    re.tag = obj['tag']
    re.status = 'started'
    try:
        if j.job_exists(jn):
            jn = jn + utils.randomstr(4)
        # BUGFIX: the create_job/sleep/build_job sequence was duplicated
        # verbatim, so the job was built twice and the second create_job
        # (same name) would likely fail and flip the status to 'error'.
        # Create and build exactly once.
        j.create_job(jn, self.edit_docker_sync_job_config(obj))
        yield gen.sleep(0.5)
        j.build_job(jn)
    except Exception as e:
        print(e.message)
        re.status = 'error'
    raise gen.Return(re)
def post_docker_sync_hook(self, obj, cloudid): jn = obj['name'] # bid = str(obj['build']['number']) jsonUtil = JsonUtil() c = jsonUtil.parseJsonString(config.CLOUD_CONFIG) j = Jenkins(c[cloudid]['jenkins_url'], username=c[cloudid]['jenkins_name'], password=c[cloudid]['jenkins_token']) re = postimagesync() try: if j.job_exists(jn): ss = xmltodict.parse(j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') if isinstance(jsonpickle.decode(ss['project']['description']), callback): desobj = jsonpickle.decode(ss['project']['description']) re.image_name = desobj.image_name re.status = self.getstatus(obj['build']['status']) re.sync_cloud_id = desobj.repo_name re.tag = desobj.tag re.time = datetime.now() re.post_callback_url = desobj.callback_url if re.status != 'error' and config.JENKINS_IMAGEOPTJOB_DELETE == 'true': j.delete_job(jn) except Exception as e: print e.message re = None raise gen.Return(re)
class JobBuilder:
    """Creates, enables and runs Jenkins jobs through a python-jenkins handle."""

    def __init__(self, jenkinsURL):
        self.j = Jenkins(jenkinsURL)
        self.jobName = ""

    def add_job(self, jobName, configString):
        """Register *jobName* with *configString*; False if it already exists."""
        self.jobName = jobName
        if self.j.job_exists(jobName):
            #Job exist in the job list
            return False
        self.j.create_job(self.jobName, configString)
        self.j.enable_job(self.jobName)
        return True

    def run_job(self, **params):
        """Trigger the previously added job, passing **params as build parameters."""
        if not self.jobName:
            print("Have to add job firstly")
            return False
        self.j.enable_job(self.jobName)
        self.j.build_job(self.jobName, params)
def _ensure_jenkins_folder_exists(server: Jenkins, folder_path: str) -> bool:
    """Ensure the Jenkins folder at *folder_path* exists.

    Creates the folder when the module-level `writeback` flag is set.
    Returns True if the folder exists (or was just created), else False.
    """
    if server.job_exists(folder_path):
        return True
    if writeback:
        server.create_folder(folder_path)
        return True
    # BUGFIX: this line previously referenced the undefined name
    # `csvcubed_folder_path`, raising NameError instead of reporting.
    print(f'Jenkins folder at {folder_path} does not exist.')
    return False
def generate_jenkins(release):
    """Log the job URL for each branch job (trunk, n15.5, n16) of *release*,
    or a 'failed' entry when the job does not exist on the server."""
    jenkins = Jenkins_Url(URL, USER, PASS)
    base_url = "https://eslinv70.emea.nsn-net.net:8080/job/"
    # Each branch pairs a log label with its job-name helper's result.
    branch_jobs = (
        ("trunk", trunk_jenkins(release)),
        ("n15.5", n15_5_jenkins(release)),
        ("n16", n16_jenkins(release)),
    )
    for label, job_name in branch_jobs:
        if jenkins.job_exists(job_name):
            log(label, base_url + job_name)
        else:
            log("failed", label + " " + release)
def generate_jenkins(release):
    """Log the Jenkins job URL for each branch job (trunk, n15.5, n16) of *release*.

    For each branch the job name comes from the matching *_jenkins() helper;
    if the job exists its URL is logged under the branch label, otherwise a
    'failed' entry naming the branch and release is logged.
    """
    jenkins = Jenkins_Url(URL,USER,PASS)
    trunk = trunk_jenkins(release)
    if jenkins.job_exists(trunk):
        log("trunk","https://eslinv70.emea.nsn-net.net:8080/job/" + trunk)
    else:
        log("failed","trunk " + release)
    n15_5 = n15_5_jenkins(release)
    if jenkins.job_exists(n15_5):
        log("n15.5","https://eslinv70.emea.nsn-net.net:8080/job/" + n15_5)
    else:
        log("failed","n15.5 " + release)
    n16 = n16_jenkins(release)
    if jenkins.job_exists(n16):
        log("n16","https://eslinv70.emea.nsn-net.net:8080/job/" + n16)
    else:
        log("failed","n16 " + release)
def run(self):
    """Setuptools command entry point: create a Jenkins job for this package.

    Picks a job-config XML template (matrix or plain), substitutes package
    metadata into it, and creates the job on the configured Jenkins server —
    unless a job of the same name already exists, in which case it errors out.
    """
    # Run the egg_info step to find our VCS url.
    self.run_command('egg_info')
    if not self.distribution.metadata.url:
        log.warn("This package does not appear to be in any repository, "
                 "aborting.")
        sys.exit(1)
    # Pull down Jenkins package
    base.fetch_build_eggs(['python-jenkins'], dist=self.distribution)
    from jenkins import Jenkins
    server = CONFIG.jenkins_url
    log.info("Connecting to Jenkins at %s" % server)
    jenkins = Jenkins(server, self.username, self.password)
    name = self.distribution.metadata.name
    if (self.matrix):
        log.info("Matrix job")
        # Template may be configured as 'path:filename' or fall back to the
        # bundled default template.
        if CONFIG.jenkins_matrix_job_xml:
            path, fname = CONFIG.jenkins_matrix_job.split(':')
        else:
            path, fname = None, 'jenkins_job_matrix.xml'
    else:
        log.info(
            "Non-matrix job - use \'--matrix\' option for matrix builds")
        if CONFIG.jenkins_job_xml:
            path, fname = CONFIG.jenkins_job.split(':')
        else:
            path, fname = None, 'jenkins_job.xml'
    with open(base.get_resource_file(fname, path)) as f:
        jenkins_config_xml = Template(f.read())
    # NOTE(review): replacing '-'/'_' with literal "?" looks like a
    # placeholder for a real escape sequence — confirm against the template.
    cfg_xml = jenkins_config_xml.safe_substitute(
        name=cgi.escape(name),
        hyphen_escaped_name=cgi.escape(name).replace("-", "?").replace(
            "_", "?"),
        description=cgi.escape(self.distribution.metadata.description),
        repository=self.distribution.metadata.url,
        email=self.distribution.metadata.author_email,
        python_string_xml=self._construct_string_values(
            self._get_active_python_versions()),
        virtualenv=CONFIG.virtualenv_executable,
        username=self.username)
    if jenkins.job_exists(name):
        # Refuse to overwrite an existing job.
        log.error(
            "Job found at %s/job/%s Please delete this before creating a new one."
            % (server, name))
    else:
        if (not self.dry_run):
            log.info("Creating job at %s/job/%s" % (server, name))
            jenkins.create_job(name, cfg_xml)
def create_jenkins_job(init_job_config, job_name):
    """Create job *job_name* on the local Jenkins by cloning the config of
    the existing job *init_job_config*.

    No-op when *job_name* already exists.
    """
    j = Jenkins("http://localhost:8080")
    # Idiomatic truth test instead of '== False'; also only fetch the
    # template config when the job actually needs to be created.
    if not j.job_exists(job_name):
        str_config = j.get_job_config(init_job_config)
        j.create_job(job_name, str_config)
def get_job_name(adaptation_release):
    """Find the '*_trunk_*_ris' Jenkins job for *adaptation_release*.

    Falls back to names with known groupId prefixes stripped
    ('com.nsn.', 'com.nokia.', 'com.nokianetworks.').
    Returns the first existing job name, or 0 when none exists.
    """
    jenkins = Jenkins_Url(URL, USER, PASS)
    names = ['adaptations_trunk_' + adaptation_release + '_ris',
             'adaptation_trunk_' + adaptation_release + '_ris']
    # 'in' instead of the __contains__ dunder call, and one loop instead of
    # three copy-pasted prefix branches (same try order as before).
    for prefix in ('com.nsn.', 'com.nokia.', 'com.nokianetworks.'):
        if prefix in adaptation_release:
            names.append('adaptations_trunk_' + adaptation_release.split(prefix)[-1] + '_ris')
    name = names[0]
    for name in names:
        if jenkins.job_exists(name):
            return name
    # Parenthesized print behaves the same under Python 2 and 3 for one arg.
    print("Jenkins doesn't exist: " + name)
    return 0
def configure(self):
    """Bring up Jenkins for CI: verify admin auth, install required plugins,
    create CI jobs from the bundled templates, install SSH credentials and
    register the driverVM build slave node.
    """
    #self.logger.info("setting jenkins authentication method to use unix userdata")
    #self.checkForSuccess(bash("cp %s/jenkis_auth_file /var/lib/jenkins"%currentDir))
    #self.logger.info("setting jenkins password")
    #self.logger.info("echo %s | sudo passwd jenkins --stdin"%self.jenkinsPasswd)
    #self.checkForSuccess(bash("service jenkins restart"))
    time.sleep(10)  # give the Jenkins service a moment to come up
    self.logger.info("checking if auth config is successful")
    j=Jenkins(self.jenkinsUrl, "admin", self.jenkinsPasswd)
    try:
        # Any authenticated API call will fail if auth is mis-configured.
        j.get_plugins()
    except Exception as e:
        self.logger.info("failed to retrive plugin info, may be auth problem")
        self.logger.exception(e)
        raise e
    self.logger.info("auth config successful")
    self.logger.info("installing requried plugins")
    self.logger.info("reading from jenkins plugins file %s/jenkins_plugins.txt"%currentDir)
    # Plugins file: one plugin name per line.
    f=open('%s/jenkins_plugins.txt'%currentDir, 'r')
    pluginsToInstall=f.read()
    pluginsToInstall=pluginsToInstall.split('\n')
    self.installPlugins(j,pluginsToInstall)
    self.logger.info("Plugin installation complete")
    self.logger.info("restarting jenkins")
    self.restartJenkins()
    self.logger.info("Creating CI jobs on jenkins")
    # One job per template file; jobs that already exist are left untouched.
    for file in os.listdir(os.path.join(currentDir,'jenkins_job_templates')):
        try:
            if not j.job_exists(file):
                f=open(os.path.join(currentDir,'jenkins_job_templates',file),'r')
                config=f.read()
                f.close()
                self.logger.info("creating job %s, reading config from file %s"%(repr(file),os.path.join(currentDir,'jenkins_job_templates',file)))
                j.create_job(file, config)
            else:
                self.logger.info("job %s already exists, not creating"%file)
        except Exception as e:
            # Best-effort: a single bad template must not abort the rest.
            self.logger.warn("failed to create job %s"%(file))
            self.logger.exception(e)
    self.logger.info("created all CI jobs")
    self.logger.info("Adding driverVM as node in jenkins")
    # NOTE(review): the username value looks redacted upstream ('******') —
    # confirm the real value before relying on this block.
    params = {
        'port': '22',
        'username': '******',
        'credentialsId':'abe3f139-77bd-4db4-824b-1c79d5205d8b',
        'host':self.config['nodes']['driverVM']['ip']
    }
    self.addPasswdToCredential(j,"vagrant")
    self.checkForSuccess(bash("cp %s /var/lib/jenkins/."%(os.path.join(currentDir,"jenkins_credentials","credentials.xml"))))
    j.create_node('driverVM', numExecutors=20, nodeDescription="CI slave VM",
                  remoteFS='/automation/jenkins', labels='driverVM', exclusive=True,launcher=jenkins.LAUNCHER_SSH, launcher_params=params)
    self.logger.info("jenkins install complete")
def run(self):
    """Setuptools command entry point: create a Jenkins job for this package.

    Selects a job-config XML template (matrix or plain), substitutes package
    metadata into it, and creates the job on the configured Jenkins server
    unless a job with the package's name already exists.
    """
    # Run the egg_info step to find our VCS url.
    self.run_command('egg_info')
    if not self.distribution.metadata.url:
        log.warn("This package does not appear to be in any repository, "
                 "aborting.")
        sys.exit(1)
    # Pull down Jenkins package
    base.fetch_build_eggs(['python-jenkins'], dist=self.distribution)
    from jenkins import Jenkins
    server = CONFIG.jenkins_url
    log.info("Connecting to Jenkins at %s" % server)
    jenkins = Jenkins(server, self.username, self.password)
    name = self.distribution.metadata.name
    if (self.matrix):
        log.info("Matrix job")
        # Template configured as 'path:filename', else the bundled default.
        if CONFIG.jenkins_matrix_job_xml:
            path, fname = CONFIG.jenkins_matrix_job.split(':')
        else:
            path, fname = None, 'jenkins_job_matrix.xml'
    else:
        log.info("Non-matrix job - use \'--matrix\' option for matrix builds")
        if CONFIG.jenkins_job_xml:
            path, fname = CONFIG.jenkins_job.split(':')
        else:
            path, fname = None, 'jenkins_job.xml'
    with open(base.get_resource_file(fname, path)) as f:
        jenkins_config_xml = Template(f.read())
    # NOTE(review): replacing '-'/'_' with literal "?" looks like a
    # placeholder for a real escape — verify against the template contents.
    cfg_xml = jenkins_config_xml.safe_substitute(
        name=cgi.escape(name),
        hyphen_escaped_name=cgi.escape(name).replace("-", "?").replace("_", "?"),
        description=cgi.escape(self.distribution.metadata.description),
        repository=self.distribution.metadata.url,
        email=self.distribution.metadata.author_email,
        python_string_xml=self._construct_string_values(self._get_active_python_versions()),
        virtualenv=CONFIG.virtualenv_executable,
        username=self.username
    )
    if jenkins.job_exists(name):
        # Refuse to overwrite an existing job.
        log.error("Job found at %s/job/%s Please delete this before creating a new one." % (server, name))
    else:
        if (not self.dry_run):
            log.info("Creating job at %s/job/%s" % (server, name))
            jenkins.create_job(name, cfg_xml)
def create_jenkins_job(config_path, job_name):
    """Create job *job_name* on the local Jenkins from the XML config file
    at *config_path*.

    No-op when *job_name* already exists.
    """
    # Read the whole config via a context manager instead of the original
    # manual line-concatenation loop over a file handle that was never closed.
    with open(config_path, "r") as f:
        str_config = f.read()
    j = Jenkins("http://localhost:8080")
    # Idiomatic truth test instead of '== False'.
    if not j.job_exists(job_name):
        j.create_job(job_name, str_config)
# NOTE(review): this block was mangled/redacted upstream — the getpass() call
# runs straight into a '******' placeholder and an unrelated prompt string
# ("Job already exists, update configuration? [y/n]"), which is not valid
# Python, and the surrounding prompt/loop logic is missing. Preserved
# verbatim below; recover the original from version control before fixing.
def run(self): # Run the egg_info step to find our VCS url. self.run_command('egg_info') if not self.distribution.metadata.url: print "This package does not appear to be in any repository, aborting." sys.exit(1) # Pull down jenkins package self.fetch_build_eggs(['python_jenkins']) from jenkins import Jenkins print "Connecting to Jenkins at %s" % self.server # Prompt for password or use command-line if self.no_prompt: if self.password is None: print "Must specify password if no-prompt is set." sys.exit(1) password = self.password else: password = getpass.getpass("Please enter your Jenkins password: "******"Job already exists, update configuration? [y/n]") if not user_input in ('y', 'n'): continue break if user_input == 'n': return print "Reconfiguring job at %s/job/%s" % (self.server, name) jenkins.reconfig_job(name, cfg_xml) else: print "Creating job at %s/job/%s" % (self.server, name) jenkins.create_job(name, cfg_xml)
class TaskBuilder:
    """Creates and triggers a template-based Jenkins build job (Python 2).

    Variant with branch/version/package_list/upgrade_package build parameters;
    get_build_status is not implemented here.
    """

    def __init__(self, jenkinsURL):
        self.j = Jenkins(jenkinsURL)
        self.jobName = ""
        #with open("config.xml") as file:
        with open("./builds/config/job/config.xml") as file:
            self.templateConfig = file.read()
        self.template = Template(unicode(self.templateConfig))

    def set_new_config(self, **params):
        # The job description simply mirrors the repos parameter.
        self.newConfig = self.template.render(repos=params['repos'], description=params['repos'])

    def add_build(self, **params):
        """Create the job if it does not exist yet, then trigger a build."""
        self.set_job_name(**params)
        self.set_new_config(**params)
        if self.j.job_exists(self.jobName):
            self.do_build(**params)
        else:
            self.j.create_job(self.jobName, self.newConfig)
            self.do_build(**params)

    def do_build(self, **params):
        """Enable the job and start a parameterized build from **params."""
        self.set_job_name(**params)
        self.set_new_config(**params)
        self.j.enable_job(self.jobName)
        self.j.build_job(
            self.jobName, {
                'branch': params['branch'],
                'version': params['version'],
                'package_list': params['package_list'],
                'upgrade_package': params['upgrade_package']
            })

    def set_job_name(self, **params):
        # Job name is derived from the repos parameter by BuildUtil.
        buildUtil = BuildUtil()
        self.jobName = buildUtil.get_job_name(repos=params['repos'])

    def get_build_status(self, **params):
        #job_info = self.j.get_job_info(self.jobName)
        #return build_status
        # Not implemented in this variant.
        pass

    def get_job_name(self):
        # Name of the last job set up through set_job_name().
        return self.jobName
class TaskBuilder:
    """Creates and triggers a template-based Jenkins build job (Python 2).

    Variant that loads the template from ./config.xml and passes
    branch/version/package_list build parameters.
    """

    def __init__(self, jenkinsURL):
        self.j = Jenkins(jenkinsURL)
        self.jobName = ""
        with open("config.xml") as file:
            self.templateConfig = file.read()
        self.template = Template(unicode(self.templateConfig))

    def set_new_config(self, **params):
        # The job description simply mirrors the repos parameter.
        self.newConfig = self.template.render(repos=params['repos'], description=params['repos'])

    def add_build(self, **params):
        """Create the job if it does not exist yet, then trigger a build."""
        self.set_job_name(**params)
        self.set_new_config(**params)
        if self.j.job_exists(self.jobName):
            self.do_build(**params)
        else:
            self.j.create_job(self.jobName, self.newConfig)
            self.do_build(**params)

    def do_build(self, **params):
        """Enable the job and start a parameterized build from **params."""
        self.set_job_name(**params)
        self.set_new_config(**params)
        self.j.enable_job(self.jobName)
        self.j.build_job(self.jobName, {'branch': params['branch'],
                                        'version': params['version'],
                                        'package_list': params['package_list']})

    def set_job_name(self,**params):
        # Job name is derived from the repos parameter by BuildUtil.
        buildUtil = BuildUtil()
        self.jobName = buildUtil.get_job_name(repos=params['repos'])

    def get_build_status(self, **params):
        #job_info = self.j.get_job_info(self.jobName)
        #return build_status
        # Not implemented in this variant.
        pass

    def get_job_name(self):
        # Name of the last job set up through set_job_name().
        return self.jobName
"""
Apache Ivy Base Job: Creates the base job for building Apache Ivy.

Connects to the Jenkins server named by the ENDPOINT/USERNAME/PASSWORD
environment variables and creates the 'apache-ivy' job from the bundled
config.xml, unless the job already exists.
"""
import os
from jenkins import Jenkins, JenkinsError

# get a handle for the jenkins server
# NOTE(review): job_exists + job_create + JenkinsError match the
# jenkins-webapi package API rather than python-jenkins — confirm which
# 'jenkins' package this project depends on.
j = Jenkins(os.environ['ENDPOINT'], os.environ['USERNAME'], os.environ['PASSWORD'])
# open the config.xml
with open("./xml/jobs/apache-ivy-base-job/config.xml") as config_file:
    CONFIG = config_file.read()
if j.job_exists("apache-ivy"):
    print("\tapache-ivy job exists. skipping...")
else:
    try:
        print("\tCreating apache-ivy-base-job...")
        j.job_create("apache-ivy", CONFIG)
    except JenkinsError:
        # Best-effort: report and continue rather than crash the pipeline.
        print("\tCouldn't create the job")
def addJob():
    """Wait for Jenkins to come up, then create (or reconfigure) the folder,
    git credential and pipeline job named in the Flask app config.

    Retries the connection every 5s until Jenkins answers; a newly created
    job is built once so its Jenkinsfile can populate the job's parameters.
    """
    startingUp = True
    JS_URL = app.config["JENKINS_URL"]
    while startingUp:
        try:
            app.logger.debug('Connecting to Jenkins %s as %s', JS_URL, app.config["UID"])
            js = JenkinsServer(JS_URL, username=app.config["UID"], password=app.config["PWD"])
            if (js.wait_for_normal_op(30)):
                app.logger.debug('Connected')
                app.logger.info('Hello from Jenkins %s', js.get_version())
                startingUp = False
        except:
            # NOTE(review): bare except — any error (including typos) is
            # treated as "Jenkins not up yet" and retried forever.
            app.logger.debug('%s caught during Jenkins connect', sys.exc_info()[0])
            app.logger.debug('Waiting for startup, sleeping')
            time.sleep(5)
    try:
        app.logger.debug('Creating folder %s', app.config["FOLDER_NAME"])
        js.create_job(app.config["FOLDER_NAME"], jenkins.EMPTY_FOLDER_XML)
    except:
        # Folder probably exists already; ignore.
        app.logger.debug('%s caught during folder create.', sys.exc_info()[0])
        pass
    if app.config["GIT_UID"] != "":
        cj = credentialXML()
        try:
            app.logger.debug('Credential check.')
            try:
                # Delete-then-create so the credential always reflects config.
                app.logger.debug('Prophylactic delete of credential.')
                js.delete_credential(app.config["GIT_UID"], app.config["FOLDER_NAME"])
            except:
                pass
            app.logger.debug('Creating credential.')
            js.create_credential(app.config["FOLDER_NAME"], cj)
        except:
            app.logger.debug('%s caught during config', sys.exc_info()[0])
    else:
        app.logger.debug("Anonymous GIT access")
    app.logger.debug('Generating Job XML.')
    nj = jobXML()
    app.logger.debug('Creating job.')
    try:
        app.logger.debug('Does job exist?.')
        if js.job_exists(app.config["FOLDER_NAME"] + '/' + app.config["PIPELINE_NAME"]):
            exists = True
            app.logger.debug('Yep!')
            #app.logger.debug('Reconfiguring job %s using [%s]', app.config["PIPELINE_NAME"], nj)
            js.reconfig_job(
                app.config["FOLDER_NAME"] + '/' + app.config["PIPELINE_NAME"], nj)
            exists = True
        else:
            app.logger.debug('Nope!')
            #app.logger.debug('Trying to create job %s using [%s].', app.config["PIPELINE_NAME"], nj)
            js.create_job(
                app.config["FOLDER_NAME"] + '/' + app.config["PIPELINE_NAME"], nj)
            app.logger.debug(
                'Attempting initial build to allow Jenkinsfile based configuration.'
            )
            rid = js.build_job(app.config["FOLDER_NAME"] + '/' + app.config["PIPELINE_NAME"])
            app.logger.debug('Started %d', rid)
            # NOTE(review): passing JenkinsBuild('', '') to a %d placeholder
            # looks wrong (object, not int) — confirm intended log value.
            app.logger.debug('Initial build to set parameters (jobid=%d)', JenkinsBuild('', ''))
    except:
        app.logger.debug('%s caught during job config', sys.exc_info()[0])
class TaskBuilder:
    """Creates/triggers template-based Jenkins build jobs and can query or
    stop them (Python 2; variant with latin/demo_data build parameters)."""

    def __init__(self, jenkinsURL):
        self.j = Jenkins(jenkinsURL)
        self.jobName = ""
        #with open("config.xml") as file:
        with open("./builds/config/job/config.xml") as file:
            self.templateConfig = file.read()
        self.template = Template(unicode(self.templateConfig))

    def set_new_config(self, **params):
        # The job description simply mirrors the repos parameter.
        self.newConfig = self.template.render(repos=params['repos'], description=params['repos'])

    def add_build(self, **params):
        """Create the job if it does not exist yet, then trigger a build."""
        self.set_job_name(**params)
        self.set_new_config(**params)
        if self.j.job_exists(self.jobName):
            self.do_build(**params)
        else:
            self.j.create_job(self.jobName, self.newConfig)
            self.do_build(**params)

    def do_build(self, **params):
        """Enable the job and start a parameterized build from **params."""
        self.set_job_name(**params)
        self.set_new_config(**params)
        self.j.enable_job(self.jobName)
        self.j.build_job(self.jobName, {'branch': params['branch'], 'version': params['version'], 'author': params['author'],
                                        'styleguide_repo': params['styleguide_repo'], 'styleguide_branch': params['styleguide_branch'], 'sidecar_repo': params['sidecar_repo'],
                                        'sidecar_branch': params['sidecar_branch'], 'package_list': params['package_list'], 'upgrade_package': params['upgrade_package'],
                                        'latin': params['latin'], 'demo_data': params['demo_data']}
                         )

    def set_job_name(self, **params):
        # Job name is derived from the repos parameter by BuildUtil.
        buildUtil = BuildUtil()
        self.jobName = buildUtil.get_job_name(repos=params['repos'])

    def get_build_status(self, jobName):
        """Return a human-readable status for *jobName*, or False on bad input.

        Jenkins 'ball color' is mapped to a label; any unmapped color
        (e.g. a blinking color while building) is reported as 'Running'.
        """
        #job_info = self.j.get_job_info(self.jobName)
        #return build_status
        # NOTE(review): "Succcess" typo preserved — callers may compare it.
        color_status = {
            "aborted": "Aborted",
            "red": "Failed",
            "blue": "Succcess"
        }
        if jobName == "":
            print "Have to specify job name"
            return False
        else:
            if self.j.job_exists(jobName):
                #Job exist in the job list
                job_info = self.j.get_job_info(jobName)
                if color_status.has_key(job_info['color']):
                    return color_status[job_info['color']]
                else:
                    return 'Running'
            else:
                print "Have to specify a validate job name"
                return False

    def get_job_name(self):
        # Name of the last job set up through set_job_name().
        return self.jobName

    def stop_jenkins_job(self, job_url):
        """Stop the last build of the job at *job_url* via its /stop endpoint."""
        stop_job_url = job_url + 'lastBuild/stop'
        try:
            # NOTE(review): .read() returns a str, and str has no close(), so
            # ss.close() raises and the bare except returns '{}' on every
            # call (after the stop request was already sent) — confirm intent.
            ss = urllib2.urlopen(stop_job_url, None, 30).read()
            ss.close()
        except:
            return '{}'

    def stop_jenkins_jobs(self, jobName):
        """Stop *jobName* and all of its currently running downstream jobs.

        1. find all the sub jobs
        2. stop every running sub job
        3. stop the current job
        """
        if self.j.job_exists(jobName):
            for x in self.j.get_job_info(jobName)['downstreamProjects']:
                if self.get_build_status(x['name']) == 'Running':
                    self.stop_jenkins_job(x['url'])
            self.stop_jenkins_job(self.j.get_job_info(jobName)['url'])
        else:
            pass
class jenkinscls(object): def __init__(self): self.url = config.JENKINS_URL self.username = config.JENKINS_NAME self.token = config.JENKINS_TOKEN self.j = Jenkins(config.JENKINS_URL, username=config.JENKINS_NAME, password=config.JENKINS_TOKEN) def getjobnames(self, strval=''): rs = {r'...': r'/'} s = utils.multiple_replace(str(strval), rs).split('/') return s[0], "/".join(s[1:]) def getlenstr(self, strval, n): return str(strval)[0:n] def getstatus(self, strval): if str(strval) == 'FAILURE': return 'error' elif str(strval) == 'ABORTED': return 'aborted' elif str(strval) == 'SUCCESS': return 'success' else: return 'started' def edit_userjob_config(self, jn, obj): n, r = self.getjobnames(jn) try: desobj = callback() desobj.des = obj['description'] desobj.callback_url = "" desobj.build_id = '' desobj.duration = '' desobj.namespace = n desobj.image_name = obj['image_name'] desobj.repo_name = r desobj.status = '' desobj.tag = obj['build_config']['tag_configs']['docker_repo_tag'] desobj.time = '' ss = xmltodict.parse(self.getbasejob_config()) jsonpickle.set_preferred_backend('json') ss['project']['description'] = jsonpickle.encode(desobj) ss['project']['properties']['com.tikal.hudson.plugins.notification.HudsonNotificationProperty']['endpoints'] \ ['com.tikal.hudson.plugins.notification.Endpoint']['url'] = config.JOBHOOKURL ss['project']['scm']['userRemoteConfigs']['hudson.plugins.git.UserRemoteConfig'] \ ['url'] = obj['build_config']['code_repo_clone_url'] ss['project']['scm']['branches']['hudson.plugins.git.BranchSpec'] \ ['name'] = '*/' + obj['build_config']['tag_configs']['code_repo_type_value'] ss['project']['builders']['hudson.tasks.Shell'][ 'command'] = config.JOBCOMMON1 ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['registry'] \ ['url'] = config.REGISTRYURL b = str(obj['build_config']['tag_configs']['build_cache_enabled']) ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['noCache'] \ = ('true' if b == 'false' else 
'false') ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['dockerfilePath'] \ = obj['build_config']['tag_configs']['dockerfile_location'] ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['repoTag'] \ = obj['build_config']['tag_configs']['docker_repo_tag'] ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['repoName'] \ = obj['image_name'] return xmltodict.unparse(ss) except Exception as e: print e.message def edit_docker_load_job_config(self, obj): try: # {docker_login} && docker import {httpfilename} {imagename} && docker push {imagename} ss = xmltodict.parse(self.getdocker_load_config()) desobj = callback() desobj.des = obj['export_file_url'] desobj.callback_url = obj['post_callback_url'] desobj.build_id = obj['build_id'] desobj.duration = '' desobj.namespace = "" desobj.repo_name = "" desobj.image_name = obj['image_name'] desobj.status = '' desobj.tag = obj['tag'] desobj.time = '' jsonpickle.set_preferred_backend('json') ss['project']['description'] = jsonpickle.encode(desobj) ss['project']['properties']['com.tikal.hudson.plugins.notification.HudsonNotificationProperty']['endpoints'] \ ['com.tikal.hudson.plugins.notification.Endpoint']['url'] = config.JOBHOOKURL tempstr = str( ss['project']['builders']['hudson.tasks.Shell']['command']) s = { r'{docker_login}': config.JOBCOMMON1, r'{httpfilename}': obj['export_file_url'], r'{imagename}': config.REGISTRYNAME + '/' + obj['image_name'] + ':' + obj['tag'] } ss['project']['builders']['hudson.tasks.Shell'][ 'command'] = utils.multiple_replace(tempstr, s) return xmltodict.unparse(ss) except Exception as e: print e.message def edit_docker_sync_job_config(self, obj): try: # {docker_login} && docker pull {oldimage} && docker tag {oldimage} {newimage} && docker push {newimage} ss = xmltodict.parse(self.getdocker_sync_config()) jsonUtil = JsonUtil() c = jsonUtil.parseJsonString(config.CLOUD_CONFIG) cid = obj['sync_cloud_id'] desobj = callback() desobj.des = "" 
desobj.callback_url = obj['post_callback_url'] desobj.build_id = '' desobj.duration = '' desobj.namespace = "" desobj.repo_name = obj['sync_cloud_id'] # 把cloudid 临时存在 这 desobj.image_name = obj['image_name'] desobj.status = '' desobj.tag = obj['tag'] desobj.time = '' jsonpickle.set_preferred_backend('json') ss['project']['description'] = jsonpickle.encode(desobj) ss['project']['properties']['com.tikal.hudson.plugins.notification.HudsonNotificationProperty']['endpoints'] \ ['com.tikal.hudson.plugins.notification.Endpoint']['url'] = config.JOBHOOKURL+'?cloudid='+obj['sync_cloud_id'] tempstr = str( ss['project']['builders']['hudson.tasks.Shell']['command']) s = { r'{docker_login}': c[cid]['login_common'], r'{oldimage}': config.REGISTRYNAME + '/' + obj['image_name'] + ':' + obj['tag'], r'{newimage}': c[cid]['registry_name'] + '/' + obj['image_name'] + ':' + obj['tag'] } ss['project']['builders']['hudson.tasks.Shell'][ 'command'] = utils.multiple_replace(tempstr, s) return xmltodict.unparse(ss) except Exception as e: print e.message def updateconfig_buildid(self, jn, imagename, build_id, callback_url): try: ss = xmltodict.parse(self.j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') desobj = jsonpickle.decode(ss['project']['description']) if str(desobj.build_id) == str(build_id): return True desobj.build_id = build_id desobj.callback_url = callback_url desobj.image_name = imagename ss['project']['description'] = jsonpickle.encode(desobj) self.j.reconfig_job(jn, xmltodict.unparse(ss)) return True except Exception as e: print e.message return False @gen.coroutine def posthook(self, obj): # s = {r'/': r'...'} jn = obj['name'] bid = str(obj['build']['number']) # n, r = self.getjobnames(jn) re = hook() try: info = self.j.get_build_info(jn, int(bid)) if self.j.job_exists(jn): ss = xmltodict.parse(self.j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') if isinstance(jsonpickle.decode(ss['project']['description']), callback): desobj = 
jsonpickle.decode(ss['project']['description']) re.namespace = desobj.namespace re.repo_name = desobj.repo_name re.build_id = str(obj['build']['number']) re.status = self.getstatus(obj['build']['status']) re.duration = info['duration'] re.tag = desobj.tag re.time = datetime.now() re.callurl = desobj.callback_url except Exception as e: print e.message re = None raise gen.Return(re) @gen.coroutine def post_docker_load_hook(self, obj): jn = obj['name'] bid = str(obj['build']['number']) re = postimage() try: # info = self.j.get_build_info(jn, int(bid)) if self.j.job_exists(jn): ss = xmltodict.parse(self.j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') if isinstance(jsonpickle.decode(ss['project']['description']), callback): desobj = jsonpickle.decode(ss['project']['description']) re.image_name = desobj.image_name re.status = self.getstatus(obj['build']['status']) re.tag = desobj.tag re.export_file_url = desobj.des re.time = datetime.now() re.build_id = desobj.build_id re.post_callback_url = desobj.callback_url if re.status != 'error' and config.JENKINS_IMAGEOPTJOB_DELETE == 'true': self.j.delete_job(jn) except Exception as e: print e.message re = None raise gen.Return(re) @gen.coroutine def post_docker_sync_hook(self, obj, cloudid): jn = obj['name'] # bid = str(obj['build']['number']) jsonUtil = JsonUtil() c = jsonUtil.parseJsonString(config.CLOUD_CONFIG) j = Jenkins(c[cloudid]['jenkins_url'], username=c[cloudid]['jenkins_name'], password=c[cloudid]['jenkins_token']) re = postimagesync() try: if j.job_exists(jn): ss = xmltodict.parse(j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') if isinstance(jsonpickle.decode(ss['project']['description']), callback): desobj = jsonpickle.decode(ss['project']['description']) re.image_name = desobj.image_name re.status = self.getstatus(obj['build']['status']) re.sync_cloud_id = desobj.repo_name re.tag = desobj.tag re.time = datetime.now() re.post_callback_url = desobj.callback_url if re.status != 'error' 
and config.JENKINS_IMAGEOPTJOB_DELETE == 'true': j.delete_job(jn) except Exception as e: print e.message re = None raise gen.Return(re) @gen.coroutine def createjob(self, jobname, obj): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if self.j.job_exists(jn): re = createrespo(n, r, '工程已存在', 'error', datetime.now()) self.j.create_job(jn, self.edit_userjob_config(jn, obj)) re = createrespo(n, r, '', 'success', datetime.now()) except Exception as e: print e.message re = createrespo(n, r, '', 'error', datetime.now()) raise gen.Return(re) @gen.coroutine def create_docker_load_job(self, obj): # s = {r'/': r'...'} # jn = utils.multiple_replace(jobname, s) s = utils.randomstr(8) jn = '__docker_load_job_' + s re = postimage() re.created_at = datetime.now() re.image_name = obj['image_name'] re.build_id = str(obj['build_id']) re.post_callback_url = obj['post_callback_url'] re.tag = obj['tag'] re.status = 'started' try: if self.j.job_exists(jn): jn = jn + utils.randomstr(4) x = self.edit_docker_load_job_config(obj) self.j.create_job(jn, x) yield gen.sleep(0.5) self.j.build_job(jn) x = self.edit_docker_load_job_config(obj) self.j.create_job(jn, x) yield gen.sleep(0.5) self.j.build_job(jn) except Exception as e: print e.message re.status = 'error' raise gen.Return(re) @gen.coroutine def create_docker_sync_job(self, obj): # s = {r'/': r'...'} # jn = utils.multiple_replace(jobname, s) s = utils.randomstr(8) jn = '__docker_sync_job_' + s cid = obj['sync_cloud_id'] jsonUtil = JsonUtil() c = jsonUtil.parseJsonString(config.CLOUD_CONFIG) j = Jenkins(c[cid]['jenkins_url'], username=c[cid]['jenkins_name'], password=c[cid]['jenkins_token']) re = postimagesync() re.time = datetime.now() re.sync_cloud_id = obj['sync_cloud_id'] re.image_name = obj['image_name'] re.post_callback_url = obj['post_callback_url'] re.tag = obj['tag'] re.status = 'started' try: if j.job_exists(jn): jn = jn + utils.randomstr(4) j.create_job(jn, 
self.edit_docker_sync_job_config(obj)) yield gen.sleep(0.5) j.build_job(jn) j.create_job(jn, self.edit_docker_sync_job_config(obj)) yield gen.sleep(0.5) j.build_job(jn) except Exception as e: print e.message re.status = 'error' raise gen.Return(re) @gen.coroutine def editjob(self, jobname, obj): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if self.j.job_exists(jn): self.j.reconfig_job(jn, self.edit_userjob_config(jn, obj)) re = createrespo(n, r, '', 'success', datetime.now()) else: re = createrespo(n, r, 'repo is not find', 'error', datetime.now()) except Exception as e: print e.message re = createrespo(n, r, '', 'error', datetime.now()) raise gen.Return(re) @gen.coroutine def getjobinfo(self, jobname): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) re = jobinfo() try: if self.j.job_exists(jn): re.namespace = n re.repo_name = r re.info = self.j.get_job_info(jn) except Exception as e: print e.message re.namespace = n re.repo_name = r re.info = "" raise gen.Return(re) @gen.coroutine def deljob(self, jobname): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if self.j.job_exists(jn): self.j.delete_job(jn) re = delrespo(n, r, 'success') except Exception as e: print e.message re = delrespo(n, r, 'error') raise gen.Return(re) @gen.coroutine def stopbuild(self, jobname, build_id): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if self.j.job_exists(jn) and self.j.get_build_info( jn, int(build_id)): self.j.stop_build(jn, int(build_id)) re = delbuild(n, r, build_id, 'aborted') else: re = delbuild(n, r, build_id, 'error') except Exception as e: print e.message re = delbuild(n, r, build_id, 'error') raise gen.Return(re) @gen.coroutine def postbuild(self, jobname, imagename, tag, callback_url): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if 
self.j.job_exists(jn): j = self.j.get_job_info(jn) build_id = j['nextBuildNumber'] if self.j.get_queue_info() != []: re = postbuild(n, r, imagename, build_id, tag, datetime.now(), 'queue') elif j['queueItem'] != None: re = postbuild(n, r, imagename, build_id, tag, datetime.now(), 'queue') else: self.updateconfig_buildid(jn, imagename, build_id, callback_url) self.j.build_job(jn) re = postbuild(n, r, imagename, build_id, tag, datetime.now(), 'started') else: re = postbuild(n, r, '', '', datetime.now(), 'error') except Exception as e: print e.message re = postbuild(n, r, '', tag, datetime.now(), 'error') raise gen.Return(re) @gen.coroutine def getbuild(self, jobname, build_id): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: b = self.j.get_build_info(jn, int(build_id)) building = b['building'] duration = b['duration'] dt = self.getlenstr(b['timestamp'], 10) started_at = utils.timestamp_datetime(int(dt)) status = self.getstatus(b['result']) stdout = self.j.get_build_console_output(jn, int(build_id)) bd = build_detail(n, r, build_id, building, started_at, duration, status, stdout) except Exception as e: print e.message bd = build_detail(n, r, build_id, '', '', '', 'error', '') raise gen.Return(bd) def getdocker_sync_config(self): s = '''<?xml version='1.0' encoding='UTF-8'?> <project> <actions/> <description></description> <keepDependencies>false</keepDependencies> <properties> <com.tikal.hudson.plugins.notification.HudsonNotificationProperty plugin="[email protected]"> <endpoints> <com.tikal.hudson.plugins.notification.Endpoint> <protocol>HTTP</protocol> <format>JSON</format> <url>http://10.1.39.60:8080/v1/hook</url> <event>completed</event> <timeout>30000</timeout> <loglines>0</loglines> </com.tikal.hudson.plugins.notification.Endpoint> </endpoints> </com.tikal.hudson.plugins.notification.HudsonNotificationProperty> <com.synopsys.arc.jenkins.plugins.ownership.jobs.JobOwnerJobProperty plugin="[email protected]"/> 
<hudson.plugins.heavy__job.HeavyJobProperty plugin="[email protected]"> <weight>1</weight> </hudson.plugins.heavy__job.HeavyJobProperty> <jenkins.model.BuildDiscarderProperty> <strategy class="hudson.tasks.LogRotator"> <daysToKeep>30</daysToKeep> <numToKeep>50</numToKeep> <artifactDaysToKeep>-1</artifactDaysToKeep> <artifactNumToKeep>-1</artifactNumToKeep> </strategy> </jenkins.model.BuildDiscarderProperty> <job-metadata plugin="[email protected]"> <values class="linked-list"> <metadata-tree> <name>job-info</name> <parent class="job-metadata" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-tree> <name>last-saved</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-date> <name>time</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value> <time>1458098001639</time> <timezone>Asia/Shanghai</timezone> </value> <checked>false</checked> </metadata-date> <metadata-tree> <name>user</name> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-string> <name>display-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> <metadata-string> <name>full-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> </children> </metadata-tree> </children> </metadata-tree> </children> </metadata-tree> 
</values> </job-metadata> </properties> <scm class="hudson.scm.NullSCM"/> <scmCheckoutRetryCount>3</scmCheckoutRetryCount> <canRoam>true</canRoam> <disabled>false</disabled> <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding> <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding> <triggers/> <concurrentBuild>false</concurrentBuild> <builders> <hudson.tasks.Shell> <command>{docker_login} && docker pull {oldimage} && docker tag --force=true {oldimage} {newimage} && docker push {newimage} </command> </hudson.tasks.Shell> </builders> <publishers/> <buildWrappers> <hudson.plugins.ansicolor.AnsiColorBuildWrapper plugin="[email protected]"> <colorMapName>xterm</colorMapName> </hudson.plugins.ansicolor.AnsiColorBuildWrapper> </buildWrappers> </project>''' return s def getdocker_load_config(self): s = '''<?xml version='1.0' encoding='UTF-8'?> <project> <actions/> <description></description> <keepDependencies>false</keepDependencies> <properties> <com.tikal.hudson.plugins.notification.HudsonNotificationProperty plugin="[email protected]"> <endpoints> <com.tikal.hudson.plugins.notification.Endpoint> <protocol>HTTP</protocol> <format>JSON</format> <url>http://10.1.39.60:8080/v1/hook</url> <event>completed</event> <timeout>30000</timeout> <loglines>0</loglines> </com.tikal.hudson.plugins.notification.Endpoint> </endpoints> </com.tikal.hudson.plugins.notification.HudsonNotificationProperty> <com.synopsys.arc.jenkins.plugins.ownership.jobs.JobOwnerJobProperty plugin="[email protected]"/> <hudson.plugins.heavy__job.HeavyJobProperty plugin="[email protected]"> <weight>1</weight> </hudson.plugins.heavy__job.HeavyJobProperty> <jenkins.model.BuildDiscarderProperty> <strategy class="hudson.tasks.LogRotator"> <daysToKeep>30</daysToKeep> <numToKeep>50</numToKeep> <artifactDaysToKeep>-1</artifactDaysToKeep> <artifactNumToKeep>-1</artifactNumToKeep> </strategy> </jenkins.model.BuildDiscarderProperty> <job-metadata plugin="[email protected]"> <values 
class="linked-list"> <metadata-tree> <name>job-info</name> <parent class="job-metadata" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-tree> <name>last-saved</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-date> <name>time</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value> <time>1458097635464</time> <timezone>Asia/Shanghai</timezone> </value> <checked>false</checked> </metadata-date> <metadata-tree> <name>user</name> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-string> <name>display-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> <metadata-string> <name>full-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> </children> </metadata-tree> </children> </metadata-tree> </children> </metadata-tree> </values> </job-metadata> </properties> <scm class="hudson.scm.NullSCM"/> <scmCheckoutRetryCount>3</scmCheckoutRetryCount> <canRoam>true</canRoam> <disabled>false</disabled> <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding> <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding> <triggers/> <concurrentBuild>false</concurrentBuild> <builders> <hudson.tasks.Shell> <command>{docker_login} && docker import {httpfilename} 
{imagename} && docker push {imagename} </command> </hudson.tasks.Shell> </builders> <publishers/> <buildWrappers> <hudson.plugins.ansicolor.AnsiColorBuildWrapper plugin="[email protected]"> <colorMapName>xterm</colorMapName> </hudson.plugins.ansicolor.AnsiColorBuildWrapper> </buildWrappers> </project>''' return s def getbasejob_config(self): s = '''<?xml version='1.0' encoding='UTF-8'?> <project> <actions/> <description></description> <keepDependencies>false</keepDependencies> <properties> <com.tikal.hudson.plugins.notification.HudsonNotificationProperty plugin="[email protected]"> <endpoints> <com.tikal.hudson.plugins.notification.Endpoint> <protocol>HTTP</protocol> <format>JSON</format> <url>http://10.1.39.60:8080/v1/hook</url> <event>completed</event> <timeout>30000</timeout> <loglines>0</loglines> </com.tikal.hudson.plugins.notification.Endpoint> </endpoints> </com.tikal.hudson.plugins.notification.HudsonNotificationProperty> <com.synopsys.arc.jenkins.plugins.ownership.jobs.JobOwnerJobProperty plugin="[email protected]"/> <hudson.plugins.heavy__job.HeavyJobProperty plugin="[email protected]"> <weight>1</weight> </hudson.plugins.heavy__job.HeavyJobProperty> <jenkins.model.BuildDiscarderProperty> <strategy class="hudson.tasks.LogRotator"> <daysToKeep>30</daysToKeep> <numToKeep>50</numToKeep> <artifactDaysToKeep>-1</artifactDaysToKeep> <artifactNumToKeep>-1</artifactNumToKeep> </strategy> </jenkins.model.BuildDiscarderProperty> <job-metadata plugin="[email protected]"> <values class="linked-list"> <metadata-tree> <name>job-info</name> <parent class="job-metadata" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-tree> <name>last-saved</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-date> <name>time</name> 
<description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value> <time>1457958794480</time> <timezone>Asia/Shanghai</timezone> </value> <checked>false</checked> </metadata-date> <metadata-tree> <name>user</name> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-string> <name>display-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> <metadata-string> <name>full-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> </children> </metadata-tree> </children> </metadata-tree> </children> </metadata-tree> </values> </job-metadata> </properties> <scm class="hudson.plugins.git.GitSCM" plugin="[email protected]"> <configVersion>2</configVersion> <userRemoteConfigs> <hudson.plugins.git.UserRemoteConfig> <url>https://github.com/zhwenh/dockerfile-jdk-tomcat.git</url> </hudson.plugins.git.UserRemoteConfig> </userRemoteConfigs> <branches> <hudson.plugins.git.BranchSpec> <name>*/master</name> </hudson.plugins.git.BranchSpec> </branches> <doGenerateSubmoduleConfigurations>false</doGenerateSubmoduleConfigurations> <browser class="hudson.plugins.git.browser.GitLab"> <url></url> <version>7.11</version> </browser> <submoduleCfg class="list"/> <extensions/> </scm> <scmCheckoutRetryCount>3</scmCheckoutRetryCount> <canRoam>true</canRoam> <disabled>false</disabled> <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding> <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding> <triggers/> 
<concurrentBuild>false</concurrentBuild> <builders> <hudson.tasks.Shell> <command>docker login -u admin -p admin123 -e [email protected] registry.test.com</command> </hudson.tasks.Shell> <com.cloudbees.dockerpublish.DockerBuilder plugin="[email protected]"> <server plugin="[email protected]"> <uri>unix:///var/run/docker.sock</uri> </server> <registry plugin="[email protected]"> <url>http://registry.test.com/v2</url> </registry> <repoName>zhwenh/tomcat</repoName> <noCache>false</noCache> <forcePull>true</forcePull> <dockerfilePath>./Dockerfile</dockerfilePath> <skipBuild>false</skipBuild> <skipDecorate>false</skipDecorate> <repoTag>2.3.1</repoTag> <skipPush>false</skipPush> <createFingerprint>true</createFingerprint> <skipTagLatest>false</skipTagLatest> <buildAdditionalArgs></buildAdditionalArgs> <forceTag>true</forceTag> </com.cloudbees.dockerpublish.DockerBuilder> </builders> <publishers> <hudson.plugins.emailext.ExtendedEmailPublisher plugin="[email protected]"> <recipientList>$DEFAULT_RECIPIENTS</recipientList> <configuredTriggers> <hudson.plugins.emailext.plugins.trigger.FailureTrigger> <email> <recipientList>$DEFAULT_RECIPIENTS</recipientList> <subject>$PROJECT_DEFAULT_SUBJECT</subject> <body>$PROJECT_DEFAULT_CONTENT</body> <recipientProviders> <hudson.plugins.emailext.plugins.recipients.DevelopersRecipientProvider/> </recipientProviders> <attachmentsPattern></attachmentsPattern> <attachBuildLog>false</attachBuildLog> <compressBuildLog>false</compressBuildLog> <replyTo>$PROJECT_DEFAULT_REPLYTO</replyTo> <contentType>project</contentType> </email> </hudson.plugins.emailext.plugins.trigger.FailureTrigger> <hudson.plugins.emailext.plugins.trigger.SuccessTrigger> <email> <recipientList>$DEFAULT_RECIPIENTS</recipientList> <subject>$PROJECT_DEFAULT_SUBJECT</subject> <body>$PROJECT_DEFAULT_CONTENT</body> <recipientProviders> <hudson.plugins.emailext.plugins.recipients.DevelopersRecipientProvider/> </recipientProviders> <attachmentsPattern></attachmentsPattern> 
<attachBuildLog>false</attachBuildLog> <compressBuildLog>false</compressBuildLog> <replyTo>$PROJECT_DEFAULT_REPLYTO</replyTo> <contentType>project</contentType> </email> </hudson.plugins.emailext.plugins.trigger.SuccessTrigger> </configuredTriggers> <contentType>default</contentType> <defaultSubject>$DEFAULT_SUBJECT</defaultSubject> <defaultContent>$DEFAULT_CONTENT</defaultContent> <attachmentsPattern></attachmentsPattern> <presendScript>$DEFAULT_PRESEND_SCRIPT</presendScript> <attachBuildLog>false</attachBuildLog> <compressBuildLog>false</compressBuildLog> <replyTo></replyTo> <saveOutput>false</saveOutput> <disabled>false</disabled> </hudson.plugins.emailext.ExtendedEmailPublisher> </publishers> <buildWrappers> <hudson.plugins.ansicolor.AnsiColorBuildWrapper plugin="[email protected]"> <colorMapName>xterm</colorMapName> </hudson.plugins.ansicolor.AnsiColorBuildWrapper> </buildWrappers> </project>''' return s @gen.coroutine def createbasejob(self): s = self.getbasejob_config() try: self.j.create_job(config.JENKINS_BASEJOB, s) except Exception as e: print e.message raise gen.Return(False) raise gen.Return(True)
import os
import settings
from jenkins import Jenkins, JenkinsError

# Connect to the Jenkins server named in the environment.
j = Jenkins(os.environ['ENDPOINT'], os.environ['USERNAME'], os.environ['PASSWORD'])

# Job template shared by every team/job combination.
with open("./xml/jobs/hourly/config.xml") as config_file:
    config = config_file.read()

# Create a "<team>-<job>" job for every combination, skipping existing ones.
for team in settings.teams:
    for job in settings.jobs:
        target = team + "-" + job
        if j.job_exists(target):
            print("\tJob exists; skipping: %s" % target)
        else:
            print("\tCreating job: %s" % target)
            try:
                # Reuse the already-computed name (the original rebuilt it).
                j.job_create(target, config)
            except JenkinsError as e:
                # Bug fix: the original used a comma ("%s", e), which printed
                # the format string and exception as a tuple instead of
                # interpolating the message.
                print("\tERROR: %s" % e)
def execOnJenkins(self,env,testSpecifierString,mailto,execOnOneZone=True):
    """Drive a CI pass on the jenkins-ccp master: repeatedly create and
    trigger TestExecutor jobs for each batch of tests, wait for completion,
    then create report-generator jobs that mail the results.

    :param env: dict of run settings (virtenvPath, build_number, hostip,
        config_file, version, repo_url, commit_id, startTime, ...) --
        assumed keys taken from the reads below; confirm against callers.
    :param testSpecifierString: selector handed to testManager
    :param mailto: recipients for the report-generator job
    :param execOnOneZone: when True only the first zone of the setup
        config is exercised
    """
    try:
        testMgr=testManager(testSpecifierString,env['virtenvPath'])
        jobModifier=modifyJOb()
        modifiedjob=""
        # NOTE(review): hard-coded Jenkins URL and credentials.
        j=Jenkins('http://jenkins-ccp.citrix.com','bharatk','BharatK')
        tests=testMgr.getTests()
        if(tests==None):
            raise Exception("found no tests to run")
        # One iteration per batch of tests returned by the manager.
        while(not tests is None):
            #trigger a jenkins job.
            os.chdir(env['virtenvPath'])
            self.logger.info("launching jenkins TestExecutor Job")
            #createing testexecutorjobs for each zone.
            cscfg=configGenerator.getSetupConfig(env['config_file'])
            jobIdentifierList=[]
            for zone in cscfg.zones:
                for pod in zone.pods:
                    for cluster in pod.clusters:
                        # Job config XML is generated on disk, read back,
                        # then removed.
                        modifiedjob=jobModifier.addTests(env['build_number'],tests)
                        file=open("/root/cloud-autodeploy2/newcode/"+modifiedjob,'r')
                        config=file.read()
                        file.close()
                        bash("rm -f /root/cloud-autodeploy2/newcode/%s"%modifiedjob)
                        if(not j.job_exists(modifiedjob)):
                            j.create_job(modifiedjob,config)
                        else:
                            j.reconfig_job(modifiedjob,config)
                        j.build_job(modifiedjob, {'BASEDIR':env['virtenvPath'], 'MGMT_SVR' : env['hostip'],'buildNumber':env['build_number'],'zoneName':zone.name,'hypervisor':cluster.hypervisor.lower(),'zoneType':zone.networktype,'configFileName':env['config_file'],'token':'bharat'})
                        jobIdentifierList.append(zone.name)
                        # Only the first cluster of the first pod is used.
                        break
                    break
                if (execOnOneZone):
                    break
            self.waitForJobComplete(env['virtenvPath'],jobIdentifierList)
            tests=testMgr.getTests()
        j.delete_job(modifiedjob)
        jobIdentifierList=[]
        bugLoggerData=[]
        time.sleep(30)
        # Second pass: one report-generator job per zone/hypervisor.
        for zone in cscfg.zones:
            self.logger.info(zone.name)
            for pod in zone.pods:
                for cluster in pod.clusters:
                    self.logger.info("creating a jeknins job to generate results and email notfication for hypervisor %s and zone %s"%(cluster.hypervisor, zone.name))
                    modifiedjob=jobModifier.modifyReportGenerator(env['build_number']+"_"+zone.name+"_"+cluster.hypervisor, mailto)
                    jobname=modifiedjob
                    file=open("/root/cloud-autodeploy2/newcode/"+modifiedjob,'r')
                    config=file.read()
                    file.close()
                    j.create_job(modifiedjob,config)
                    j.build_job(modifiedjob, {'buildNumber':env['build_number'],'BuildNo':env['build_number'], 'MGMT_SVR' : env['hostip'], 'BASEDIR':env['virtenvPath'], 'version':env['version'], 'BranchInfo':env['version'],\
                    'GitRepoUrl':env['repo_url'],'GitCommitId':env['commit_id'], 'CIRunStartDateTime':env['startTime'],'CIRunEndDateTime':time.strftime("%c"), 'WikiLinks':'https://cwiki.apache.org/confluence/display/CLOUDSTACK/Infrastructure%2CCI%2CSimulator%2CAutomation+Changes','hypervisor':cluster.hypervisor.lower(), 'HyperVisorInfo':cluster.hypervisor.lower(), 'zoneName':zone.name, 'BuildReport':"http://jenkins-ccp.citrix.com/job/"+jobname+"/1/testReport/",'token':'bharat'})
                    jobIdentifierList.append("report_"+zone.name)
                    jobDetails={"job_name":modifiedjob,"related_data_path":env['virtenvPath']}
                    self.resourceMgr.addJobDetails(jobDetails)
                    bugLoggerData.append({'hypervisor':cluster.hypervisor.lower(), 'branch':env['version'],'logname':cluster.hypervisor.lower()+'__Log_'+env['build_number'], 'type':'BVT'})
                    self.logger.info("bug logger data in zone looop %s"%bugLoggerData)
                    # Same first-cluster/first-pod restriction as above.
                    break
                break
            if (execOnOneZone):
                #env['hypervisor':cluster.hypervisor.lower()]
                break
        # NOTE(review): passing the list as a second positional arg without a
        # %s placeholder -- logging will drop/mangle it; presumably meant
        # "job identifier list %s".
        self.logger.info("job identifier list", jobIdentifierList)
        self.waitForJobComplete(env['virtenvPath'],jobIdentifierList)
        #self.logger.info("deleting the reporter job on jenkins job_name=%s",jobname)
        #j.delete_job(jobname)
        self.logger.info("cleaning up the workspace")
        bash("rm -f /root/cloud-autodeploy2/newcode/%s"%modifiedjob)
        self.logger.info("running bug logger")
        #self.runBugLogger(bugLoggerData)
        #os.system("rm -rf %s"%(self.jenkinsWorkspace+"/"+jobname))
    except Exception, e:
        self.logger.error(e)
import os
import settings
from jenkins import Jenkins, JenkinsError

# Jenkins handle built from environment credentials.
j = Jenkins(os.environ['ENDPOINT'], os.environ['USERNAME'], os.environ['PASSWORD'])

# Load the base folder template once.
with open("./xml/folders/base/config.xml") as template_file:
    config = template_file.read()

# Ensure a Jenkins folder exists for every team.
for team in settings.teams:
    if j.job_exists(team):
        print("\tFolder exists; skipping: %s" % team)
        continue
    try:
        print("\tCreating folder: %s" % team)
        j.job_create(team, config)
    except JenkinsError as e:
        print("\tERROR: %s" % e)
class JenkinsTools(object):
    """Convenience wrapper around a python-jenkins ``Jenkins`` session.

    Provides job lookup (by exact name or regex), build-info queries, a
    condensed last-build summary, and stopping a job's latest build.
    All errors are logged (in the original Chinese messages) rather than
    propagated; failed lookups return ``None``.
    """

    def __init__(self, url, username, password):
        """Open the Jenkins session.

        :param url: Jenkins base URL
        :param username: login user name
        :param password: login password
        """
        try:
            self.session = Jenkins(url=url, username=username, password=password)
        except Exception:
            logger.exception('初始化Jenkins连接过程中发生异常,请检查!')

    @property
    def get_session(self):
        """Return the underlying Jenkins session object."""
        return self.session

    def get_job_info(self, job_name):
        """Return the job-info dict for *job_name*.

        Exits the process (``sys.exit(1)``) when the job does not exist.

        :param job_name: job name
        :return: job-info dict, or ``None`` when the lookup raised
        """
        try:
            if self.session.job_exists(name=job_name):
                return self.session.get_job_info(name=job_name)
            # A missing job is treated as fatal for the calling script.
            logger.warning(
                '[WARNING]: Jenkins构建项目"{}"并不存在,请检查!'.format(job_name))
            sys.exit(1)
        except Exception:
            logger.exception('查看Jenkins构建项目"{}"过程中发生异常,请检查!'.format(job_name))

    def get_job_info_by_regex(self, pattern):
        """Return info for every job whose name matches *pattern*.

        :param pattern: job-name regular expression
        :return: list of job-info dicts, or ``None`` when the lookup raised
        """
        try:
            return self.session.get_job_info_regex(pattern=pattern)
        except Exception:
            logger.exception('通过正则表达式"{}"查看匹配构建项目过程中发生异常,请检查!'.format(pattern))

    def get_job_build_info(self, job_name, build_number):
        """Return the build-info dict for one build of a job.

        :param job_name: job name
        :param build_number: build id
        :return: build-info dict, or ``None`` when the lookup raised
        """
        try:
            return self.session.get_build_info(name=job_name,
                                               number=build_number)
        except Exception:
            logger.exception('通过构建项目名称"{}"和构建ID"{}"查看构建信息过程中发生异常,请检查!'.format(
                job_name, build_number))

    def _last_build_summary(self, job, with_operator=False):
        """Build the condensed summary dict for one job-info dict.

        Fetches the build info ONCE (the previous code re-fetched it for
        every field: 3-4 round trips per job).

        :param job: job-info dict (``displayName`` / ``lastBuild`` keys)
        :param with_operator: include the build-trigger description
        :return: summary dict
        """
        last = job['lastBuild']
        if last is None:
            return dict(ActionJob=job['displayName'],
                        LastBuild="No latest build job info now.")
        build = self.get_job_build_info(job['displayName'], last['number'])
        summary = dict(ActionJob=job['displayName'],
                       LastNumber=last['number'],
                       Building=build['building'],
                       Result=build['result'],
                       Time=trans_timestamp(build['timestamp']))
        if with_operator:
            summary['Operator'] = \
                build['actions'][0]['causes'][0]['shortDescription']
        return summary

    def rebase_build_info(self, job_name):
        """Return a condensed summary of *job_name*'s latest build.

        :param job_name: job name
        :return: single-element list of summary dicts, or ``None`` on error
        """
        try:
            job_set = self.get_job_info(job_name)
            return [self._last_build_summary(job_set)]
        except Exception:
            logger.exception(
                '通过构建项目名称"{}"查看其自定义构建信息过程中发生异常,请检查!'.format(job_name))

    def rebase_build_info_by_regex(self, pattern):
        """Return condensed summaries for every job matching *pattern*.

        BUG FIX: this method previously called the undefined
        ``self.get_build_info(...)`` (AttributeError on any job with a
        last build); lookups now go through ``get_job_build_info``.

        :param pattern: job-name regular expression
        :return: list of summary dicts, ``"invalid pattern"`` for a bad
                 regex, or ``None`` on any other error
        """
        try:
            job_set = self.get_job_info_by_regex(pattern)
            if len(job_set) == 0:
                return []
            return [self._last_build_summary(job, with_operator=True)
                    for job in job_set]
        except re.error:
            return "invalid pattern"
        except Exception:
            logger.exception(
                '通过正则表达式"{}"查看匹配项目的自定义构建信息过程中发生异常,请检查!'.format(pattern))

    def stop_job_latest_build(self, job_name):
        """Stop the most recent build of *job_name*.

        :param job_name: job name
        """
        try:
            lastNumber = self.rebase_build_info(
                job_name=job_name)[0]['LastNumber']
            self.session.stop_build(name=job_name, number=lastNumber)
        except Exception:
            logger.exception(
                '停止匹配正则表达式"{}"的构建项目的最近构建任务过程中发生异常,请检查!'.format(job_name))
# Delete each team's folder job and every team-scoped job from Jenkins.
import os
import settings
from jenkins import Jenkins, JenkinsError

# get a handle for the jenkins server
j = Jenkins(os.environ['ENDPOINT'], os.environ['USERNAME'], os.environ['PASSWORD'])

for team in settings.teams:
    if not j.job_exists(team):
        print("\tJob doesn't exist; skipping: %s" % team)
    else:
        try:
            print("\tDeleting job: %s" % team)
            j.job_delete(team)
        except JenkinsError:
            print("\tCouldn't delete job: %s" % team)
    # Team-scoped jobs are namespaced as "<team>-<job>".
    for job in settings.jobs:
        target = team + "-" + job
        if not j.job_exists(target):
            print("\tJob doesn't exist; skipping: %s" % target)
            continue
        try:
            print("\tDeleting job: %s" % target)
            j.job_delete(target)
        except JenkinsError:
            print("\tCouldn't delete job: %s" % target)
def execOnJenkins(self, env, testSpecifierString, mailto, reRunFailedTests=True, retryCount=1, report=True, execOnOneZone=True, postOnPr=False, testMgr=None, avoidZones=None):
    """Drive one CI pass on Jenkins: generate per-batch test-executor jobs,
    run them per zone, optionally queue failed suites for a re-run, and
    publish a report.

    :param env: mutable run-state dict (reads virtenvPath, build_number,
        hostip, config_file; rebuilds env['hypervisor'] here)
    :param testSpecifierString: selector handed to testManager to pick tests
    :param mailto: recipients; only forwarded to the recursive re-run call
    :param reRunFailedTests: when True, failed suites may be re-run
    :param retryCount: budget of re-run rounds
    :param execOnOneZone: stop after the first zone of the config
    :param postOnPr: post the report on a PR instead of e-mailing it
    :param testMgr: pre-built testManager (created here when None)
    :param avoidZones: zone names to skip (default: none)
    :return: env on success; None when an exception was caught and logged
    """
    try:
        env['hypervisor'] = ''
        if avoidZones is None:
            avoidZones = []
        if testMgr is None:
            testMgr = testManager(testSpecifierString, env['virtenvPath'])
        jobModifier = modifyJOb()
        modifiedjob = ''
        # NOTE(review): hard-coded lab-server credentials — presumably
        # intentional for this environment; confirm before reuse.
        j = Jenkins('http://jenkins-ccp.citrix.com', 'bharatk', 'BharatK')
        tests = testMgr.getTests()
        if tests == None:  # NOTE(review): `is None` is the idiomatic test
            raise Exception('found no tests to run')
        # Keep launching batches until the test manager runs dry.
        while tests is not None:
            os.chdir(env['virtenvPath'])
            self.logger.info('launching jenkins TestExecutor Job')
            cscfg = configGenerator.getSetupConfig(env['config_file'])
            for zone in cscfg.zones:
                if zone.name in avoidZones:
                    continue
                for pod in zone.pods:
                    for cluster in pod.clusters:
                        # One generated job config per batch of tests.
                        for modifiedjob in jobModifier.addTests(env['build_number'], tests, self.throttle_job_count):
                            # NOTE(review): `file` shadows the builtin; the
                            # generated config is read, then its source file
                            # is removed.
                            file = open('/root/cloud-autodeploy2/newcode/' + modifiedjob, 'r')
                            config = file.read()
                            file.close()
                            bash('rm -f /root/cloud-autodeploy2/newcode/%s' % modifiedjob)
                            # Recreate the job so it always carries fresh config.
                            if not j.job_exists(modifiedjob):
                                j.create_job(modifiedjob, config)
                            else:
                                j.delete_job(modifiedjob)
                                j.create_job(modifiedjob, config)
                            j.build_job(modifiedjob, {'BASEDIR': env['virtenvPath'], 'MGMT_SVR': env['hostip'], 'buildNumber': env['build_number'], 'zoneName': zone.name, 'hypervisor': cluster.hypervisor.lower(), 'zoneType': zone.networktype, 'configFileName': env['config_file'], 'token': 'bharat'})
                        self.waitForJobComplete(env['virtenvPath'], [zone.name])
                        # Accumulate the hypervisors exercised this run.
                        env['hypervisor'] = '%s,%s' % (env['hypervisor'], cluster.hypervisor.lower())
                        break  # only the first cluster of the pod
                    break  # only the first pod of the zone
            if execOnOneZone:
                break
            tests = testMgr.getTests()
        # Only the last generated job is deleted here.
        j.delete_job(modifiedjob)
        reportAnalyserMap = self.getReportAnalysers(cscfg, env, execOnOneZone)
        if(reRunFailedTests):
            while retryCount > 0:
                self.logger.info("checking if we need to re run any of the tests")
                testsToReRun = []
                for key in reportAnalyserMap.keys():
                    tests = reportAnalyserMap[key].suitsToRerun
                    if(tests is None):
                        # Nothing to re-run in this zone; skip it next round.
                        avoidZones.append(key)
                    else:
                        testMgr.addTestsToReRun(tests)
                    retryCount -= 1
                    self.logger.info("zone name:%s The follwoing tests will be re run %s" % (key, tests))
                # NOTE(review): testsToReRun is never populated, so this
                # always breaks on the first round and the recursive re-run
                # below appears unreachable — confirm intent.
                if(len(testsToReRun) == 0):
                    break
                else:
                    self.execOnJenkins(env, testSpecifierString, mailto, reRunFailedTests, retryCount, False, execOnOneZone, postOnPr, testMgr, avoidZones)
        if report and postOnPr:
            for key in reportAnalyserMap.keys():
                self.reportOnPr(reportAnalyserMap[key].generateTextReport2(), env)
        elif report:
            self.reportUsingJenkinsEmailPlugin(cscfg, env)
        return env
    except Exception as e:
        self.logger.exception(e)
""" Build: Builds every job for every team defined in settings """ import os from jenkins import Jenkins, JenkinsError import settings # get a handle for the jenkins server j = Jenkins(os.environ['ENDPOINT'], os.environ['USERNAME'], os.environ['PASSWORD']) for team in settings.teams: for job in settings.jobs: target = team+"-"+job if j.job_exists(target): try: print("\tBuiding job: %s" % target) j.job_build(target) except JenkinsError: print("\tCouldn't build job: %s" % target)