class myjenkins:
    """Thin helper around a python-jenkins connection.

    NOTE(review): relies on module-level ``jenkins_url``, ``username`` and
    ``token_dict`` being defined before instantiation -- confirm.
    """

    def __init__(self):
        self.server = Jenkins(jenkins_url, username=username,
                              password=token_dict)

    def getjobconfig(self, job_name):
        """Return the raw config.xml of *job_name*."""
        job_info = self.server.get_job_config(job_name)
        return job_info

    def search_job_name(self, job_id):
        """Resolve an app id to a full job name.

        Only the '0-Site' and '1-API' views are scanned (add more views
        here if needed); job names must be unique across those views.
        Returns the last matching name, or '' when nothing matches.
        """
        site = self.server.get_jobs(view_name='0-Site')
        web = self.server.get_jobs(view_name='1-API')
        job_name = ''
        for job in site + web:
            # FIX: job_id is a literal identifier, not a regex.  The old
            # re.findall(job_id, ...) treated characters such as '.' as
            # metacharacters and could match unrelated jobs; a plain
            # substring test has the intended semantics.
            if job_id in job['name']:
                job_name = job['name']
        return job_name

    def get_git_url(self, job_name):
        """Extract the git remote URL from the job's config.xml.

        Used to verify that a user-supplied branch exists in that repo.
        """
        info = self.getjobconfig(job_name)
        convertedDict = xmltodict.parse(info)
        git_repo_url = convertedDict['project']['scm']['userRemoteConfigs'][
            'hudson.plugins.git.UserRemoteConfig']['url']
        return git_repo_url
def _upsert_jenkins_job(server: Jenkins, full_job_name: str, xml_job_config: str):
    """Create or reconfigure a Jenkins job so it matches *xml_job_config*.

    Missing jobs are created; existing jobs are diffed against the desired
    (canonicalized) XML and interactively updated when they differ.

    NOTE(review): reads the module-level ``writeback`` flag; when it is
    false this function only reports what it would do.
    """
    if server.job_exists(full_job_name):
        job = server.get_job_info(full_job_name)
    else:
        # FIX: the original f'...doesn''t exist.' was implicit string
        # concatenation and printed "doesnt exist." -- the apostrophe was
        # silently dropped.
        print(f"Jenkins job {full_job_name} doesn't exist.")
        job = None

    config_xml_string = canonicalize_jenkins_xml(xml_job_config)

    if job is None and writeback:
        print(f'Creating new job {full_job_name}')
        try:
            server.create_job(full_job_name, config_xml_string)
        except JenkinsException as e:
            print(f'Failed creating job:\n{e}')
    elif job is not None:
        # Canonicalize both sides so cosmetic XML differences don't show up.
        current_xml_string = canonicalize_jenkins_xml(
            server.get_job_config(full_job_name))
        diffs = diff_files(
            BytesIO(current_xml_string.encode('utf-8')),
            BytesIO(config_xml_string.encode('utf-8')))
        if len(diffs) > 0:
            print(f'Jenkins job {full_job_name} needs update')
            if writeback:
                print(json.dumps(diffs, indent=4))
                if input(f'Are you sure you want to update configuration for {full_job_name} (y/n) ? ') == 'y':
                    print(f'Updating job configuration for {full_job_name}')
                    try:
                        server.reconfig_job(full_job_name, config_xml_string)
                    except JenkinsException as e:
                        print(f'Failed updating job:\n{e}')
def post_docker_sync_hook(self, obj, cloudid): jn = obj['name'] # bid = str(obj['build']['number']) jsonUtil = JsonUtil() c = jsonUtil.parseJsonString(config.CLOUD_CONFIG) j = Jenkins(c[cloudid]['jenkins_url'], username=c[cloudid]['jenkins_name'], password=c[cloudid]['jenkins_token']) re = postimagesync() try: if j.job_exists(jn): ss = xmltodict.parse(j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') if isinstance(jsonpickle.decode(ss['project']['description']), callback): desobj = jsonpickle.decode(ss['project']['description']) re.image_name = desobj.image_name re.status = self.getstatus(obj['build']['status']) re.sync_cloud_id = desobj.repo_name re.tag = desobj.tag re.time = datetime.now() re.post_callback_url = desobj.callback_url if re.status != 'error' and config.JENKINS_IMAGEOPTJOB_DELETE == 'true': j.delete_job(jn) except Exception as e: print e.message re = None raise gen.Return(re)
def getConfig(self, job, jenkins_username, jenkins_password):
    """Download a job's config.xml and extract its git URL, the GitLab
    project name, and the pipeline script path.

    Returns a (git_url, gitlab_project, script_path) tuple built from
    the *last* <url> and <scriptPath> elements found in the XML.
    """
    server = Jenkins("https://jenkins.cn-x-cloud-pg.com.cn",
                     username=jenkins_username,
                     password=jenkins_password,
                     timeout=3600)
    root = ET.fromstring(server.get_job_config(job))
    # Later elements overwrite earlier ones -- the last match wins,
    # exactly like the original loop-variable reuse.
    for url_node in root.iter('url'):
        git_url = url_node.text
        gitlabProject = git_url.split('/')[-1].split('.')[0]
    for path_node in root.iter('scriptPath'):
        scriptPath = path_node.text
    return (git_url, gitlabProject, scriptPath)
def create_jenkins_job(init_job_config, job_name):
    """Create a new Jenkins job by cloning an existing job's config.

    Parameters:
        init_job_config: name of the existing job whose config.xml is copied.
        job_name: name of the job to create; nothing happens if it exists.
    """
    j = Jenkins("http://localhost:8080")
    str_config = j.get_job_config(init_job_config)
    # Idiomatic truthiness test instead of '== False'.
    if not j.job_exists(job_name):
        j.create_job(job_name, str_config)
class JenkinsConnection(object):
    """Small facade over a python-jenkins connection: back up job configs
    to disk and list the jobs configured in a view."""

    def __init__(self, username, password):
        # URL is a module-level constant (defined outside this chunk).
        self.jenkins = Jenkins(URL, username=username, password=password)

    def store_job(self, job_name):
        """Save the job's config.xml to disk.

        NOTE(review): self._config_filename is not defined in this class as
        shown -- presumably provided elsewhere (mixin/subclass); confirm.
        """
        job_config = self.jenkins.get_job_config(job_name)
        with open(self._config_filename(job_name), 'w') as config:
            config.write(job_config)

    def get_jobs(self, view):
        """Return the non-empty job names configured for *view*.

        FIX: Element.getchildren() was deprecated and removed in
        Python 3.9; iterating the element directly yields the same
        children and works on all versions.
        """
        raw_config = self.jenkins.get_view_config(view)
        config = ET.fromstring(raw_config)
        jobs = [j.text for j in config.find('jobNames')]
        return filter(None, jobs)
class JenkinsConnection(object):
    """Wraps a Jenkins connection: dumps a job's config.xml to a file and
    lists the job names belonging to a view."""

    def __init__(self, username, password):
        self.jenkins = Jenkins(URL, username=username, password=password)

    def store_job(self, job_name):
        # Fetch the XML first, then write it out in one go.
        # (_config_filename is defined elsewhere -- not visible here.)
        xml_text = self.jenkins.get_job_config(job_name)
        target = self._config_filename(job_name)
        with open(target, 'w') as out_file:
            out_file.write(xml_text)

    def get_jobs(self, view):
        view_xml = ET.fromstring(self.jenkins.get_view_config(view))
        job_nodes = view_xml.find('jobNames').getchildren()
        names = [node.text for node in job_nodes]
        # Drop empty entries, exactly like the original filter(None, ...).
        return filter(None, names)
class JenkinsControl(object):
    """Drive a disposable local Jenkins instance (used for testing)."""

    war = pjoin(here, 'tmp/jenkins.war')
    cli = pjoin(here, 'tmp/jenkins-cli.jar')
    home = pjoin(here, 'tmp/jenkins')

    def __init__(self, addr='127.0.0.1:60888', cport='60887'):
        self.addr, self.port = addr.split(':')
        # FIX: cport was accepted but never stored, yet start_server() and
        # shutdown_server() read self.cport -> AttributeError at first use.
        self.cport = cport
        self.url = 'http://%s' % addr
        self.py = Jenkins(self.url)

    def start_server(self):
        cmd = pjoin(here, './bin/start-jenkins.sh 1>/dev/null 2>&1')
        env = {'JENKINS_HOME': self.home,
               'JENKINS_PORT': self.port,
               'JENKINS_CPORT': self.cport,
               'JENKINS_ADDR': self.addr}
        check_call(cmd, shell=True, env=env)

    def shutdown_server(self):
        # Jenkins listens on the control port for a '0' shutdown byte.
        cmd = 'echo 0 | nc %s %s' % (self.addr, self.cport)
        check_call(cmd, shell=True)

    def clean_home(self):
        rmtree(self.home)

    def createjob(self, name, configxml_fn):
        # FIX: open(...).read() leaked the file handle; use a context manager.
        with open(configxml_fn) as fh:
            self.py.create_job(name, fh.read())

    def getjobs(self):
        return {i['name']: i for i in self.py.get_jobs()}

    def enabled(self, name):
        return self.py.get_job_info(name)['buildable']

    def job_etree(self, job):
        res = self.py.get_job_config(job)
        return etree.fromstring(res)
import sys

from jenkins import Jenkins

# Connect to the local Jenkins instance and dump one job's config.xml.
server = Jenkins(url='http://sola.local:8080',
                 username='******', password='******')
job_xml = server.get_job_config('Route-To-Live/WebGoat-Build')
print(job_xml)
from jenkins import Jenkins

# Parameters for a full build of the feature1 environment.
job_settings = {
    'DEPLOY_TO': 'feature1.dev.roundme.com',
    'FRONTEND_BRANCH': '1.6.1',
    'BACKEND_BRANCH': '1.6.1',
    'RUN_TEST': False,
    'DEPLOY_REPO_BRANCH': 'master',
    'NPM_ARGS': 'production',
    'GRUNT_ARGS': 'testing',
    'DEPLOY_TO_S3': False,
    'MIGRATE': False,
    'ENV': 'feature'
}

server = Jenkins('http://234234', username='******', password='******')
# serverstring
print(server)
print(server.get_whoami())

JOB_NAME = 'Roundme.Full.Build'
print(server.get_job_config(JOB_NAME))

# Kick off the parameterized build, then report the last completed run.
server.build_job(JOB_NAME, job_settings)
last_build_number = server.get_job_info(JOB_NAME)['lastCompletedBuild']['number']
print(server.get_build_info(JOB_NAME, last_build_number))
# Collect the job names of interest from `results` (produced upstream).
print(results)
names = {item[0] for item in results}

server = Jenkins('http://172.16.101.96:8080/',
                 username='******', password='******')

# Point matching jobs at the 'Jenkinsfile-bak' shared-library script.
tag = "Jenkinsfile-bak"
for job in server.get_jobs():
    full_name = job['fullname']
    if full_name not in names:
        continue
    config = server.get_job_config(full_name)
    match = re.search(r'<scriptId>(.*?)</scriptId>', config, re.M)
    if not match:
        continue
    find = match.groups()[0]
    if find in ("Jenkinsfile-Java-Business-Docker", "Jenkinsfile-Java-Business"):
        print(full_name, find)
        server.reconfig_job(full_name, config.replace(find, tag))
def main():
    """Fetch a CI job definition from Jenkins and run (or dump) it locally.

    Reads credentials from the JJR_USER / JJR_PASS environment variables,
    downloads the job's config.xml, turns its parameter definitions into
    shell 'export' statements, writes each builder command into a script
    file and executes them in order unless --dryrun/--dump is given.
    """
    parser = argparse.ArgumentParser(description='Execute CI jobs locally')
    parser.add_argument('jobname',
                        help='name of the job that you want to run locally')
    parser.add_argument('--args', '-a',
                        help='JSON dictionary to override job arguments')
    parser.add_argument('--dryrun', '-d', action='store_true',
                        help=('dryrun with all command and parameters '
                              'saved in /tmp directory'))
    parser.add_argument('--reason', '-r', help=('override build reason'),
                        default=None)
    parser.add_argument('--dump', '-D', action='store_true',
                        help='create dump of a CI job')
    args = parser.parse_args()
    # --dump implies --dryrun: dumping must never execute the commands.
    if args.dump is True:
        args.dryrun = True
    jobname = args.jobname
    src = 'https://ci.openquake.org/'
    jjr_user = os.getenv('JJR_USER', None)
    jjr_pass = os.getenv('JJR_PASS', None)
    if jjr_user is None or jjr_pass is None:
        print("JJR_USER and/or JJR_PASS environment variables are not set")
        sys.exit(1)
    server = Jenkins(src, username=jjr_user, password=jjr_pass)
    job_conf = server.get_job_config(jobname)
    tree = ElementTree.fromstring(job_conf)
    # Parameter definitions live under
    # properties/ParametersDefinitionProperty/parameterDefinitions.
    configs = tree.find('properties').find(
        'hudson.model.ParametersDefinitionProperty').find(
        'parameterDefinitions')
    # collect params from job config; each entry is a dict with
    # 'name' / 'desc' / 'defa' (default value) keys
    params = OrderedDict()
    params['GEM_JENKINS_REASON'] = {
        'name': 'GEM_JENKINS_REASON',
        'desc': 'auto-generated'
    }
    if args.reason is None:
        params['GEM_JENKINS_REASON']['defa'] = ('Started by user %s'
                                                % getpass.getuser())
    else:
        params['GEM_JENKINS_REASON']['defa'] = args.reason
    params['JOB_NAME'] = {
        'name': 'JOB_NAME',
        'defa': jobname,
        'desc': 'auto-generated'
    }
    params['JENKINS_HOME'] = {
        'name': 'JENKINS_HOME',
        'defa': expanduser("~"),
        'desc': 'auto-generated'
    }
    params['BUILD_NUMBER'] = {
        'name': 'BUILD_NUMBER',
        'defa': 1,
        'desc': 'auto-generated'
    }
    if args.dump is False:
        try:
            g = git.cmd.Git(os.getcwd())
            params['GIT_BRANCH'] = {
                'name': 'GIT_BRANCH',
                'defa': g.rev_parse('--abbrev-ref', 'HEAD'),
                'desc': 'auto-generated'
            }
        # NOTE(review): bare except hides unrelated failures; narrowing to
        # the git command error would be safer -- confirm before changing.
        except:
            print('WARNING: retrieve git informations failed')
    for config in configs:
        name = config.find('name').text
        desc = config.find('description').text
        if name == 'branch' and 'GIT_BRANCH' in params:
            defa = params['GIT_BRANCH']['defa']
        else:
            defa = config.find('defaultValue').text
        params[name] = {'name': name, 'desc': desc, 'defa': defa}
    # try to get builtin_var from environment
    # NOTE(review): this stores a bare string while every other entry is a
    # dict with 'name'/'desc'/'defa' keys; the f_args loop below indexes
    # value['name'] and would fail on these entries -- confirm.
    for builtin_var in builtin_vars:
        if builtin_var in os.environ:
            params[builtin_var] = os.environ[builtin_var]
    # integrate params with user-supplied --args JSON overrides
    if args.args:
        override_args = json.loads(args.args)
        if type(override_args) != dict:
            raise TypeError('overridden args must be in a JSON dict format')
        for key, value in override_args.items():
            prev = params.get(key, None)
            desc = ("%s (%s)" % (prev['desc'], "passed to jjrunner")
                    if prev else "Passed to jjrunner")
            params[key] = {'name': key, 'defa': value, 'desc': desc}
    # collect commands from job config
    commands = []
    commands_tree = tree.find('builders').getchildren()
    for command_tree in commands_tree:
        commands.append(command_tree.getchildren()[0].text)
    if args.dump is False:
        # check for missing built-in variables in the scripts
        for builtin_var in builtin_vars:
            if builtin_var in params:
                continue
            for command in commands:
                if re.search("\\b%s\\b" % builtin_var, command):
                    print("WARNING: builtin var %s found in script\n%s"
                          % (builtin_var, command))
    if args.dump is True:
        if os.path.isdir(jobname):
            print("Folder '%s' already exists" % jobname)
            sys.exit(1)
        os.mkdir(jobname)
    # The args file goes either into the dump folder or into a tempfile.
    if args.dump is True:
        f_args_name = os.path.join(jobname, "args.sh")
        f_args_inode = os.open(f_args_name, os.O_WRONLY | os.O_CREAT)
    else:
        f_args_inode, f_args_name = tempfile.mkstemp(prefix="jjrunner_args_",
                                                     suffix=".sh")
    if args.dryrun is True and args.dump is False:
        print("Arguments file: %s" % f_args_name)
    # Write every collected parameter as an 'export NAME="value"' line.
    with os.fdopen(f_args_inode, mode="w") as f_args:
        for key, value in params.items():
            name = value['name']
            desc = value['desc']
            defa = value['defa']
            f_args.write("# %s\n" % desc)
            f_args.write("export %s=\"%s\"\n\n" % (name, defa))
    f_main_inode, f_main_name = tempfile.mkstemp(prefix="jjrunner_main_",
                                                 suffix=".sh")
    # NOTE(review): this reopens f_args_inode (already consumed by the
    # 'with' block above) instead of f_main_inode -- looks like a bug; the
    # intent was presumably to close the descriptor returned by mkstemp.
    f_main = os.fdopen(f_args_inode, mode="w")
    f_main.close()
    for idx, command in enumerate(commands):
        if args.dump is True:
            f_com_name = os.path.join(jobname, "com_%02d.sh" % idx)
            f_com_inode = os.open(f_com_name, os.O_WRONLY | os.O_CREAT)
        else:
            f_com_inode, f_com_name = tempfile.mkstemp(
                prefix="jjrunner_com_%02d_" % idx, suffix=".sh")
        f_com = os.fdopen(f_com_inode, "w")
        f_com.write(command)
        f_com.close()
        os.chmod(f_com_name, stat.S_IREAD | stat.S_IEXEC | stat.S_IWUSR)
        # The wrapper script sources the args file then runs the command file.
        f_main = open(f_main_name, "w")
        f_main.write('#!/bin/bash\n. %s\n%s\n' % (f_args_name, f_com_name))
        f_main.close()
        os.chmod(f_main_name, stat.S_IREAD | stat.S_IEXEC | stat.S_IWUSR)
        if args.dryrun is False:
            proc = subprocess.Popen(f_main_name)
            try:
                outs, errs = proc.communicate(timeout=3600)
            # NOTE(review): TimeoutExpired is presumably
            # subprocess.TimeoutExpired imported elsewhere -- confirm.
            except TimeoutExpired:
                proc.kill()
                outs, errs = proc.communicate()
            os.unlink(f_com_name)
            if proc.returncode != 0:
                print("#---- command ----:\n%s\n#---- end command ----\n\n"
                      "Returned with errorcode %d\n" % (command,
                                                        proc.returncode))
                print("#---- stdout ----:\n%s\n" % outs)
                print("#---- stderr ----:\n%s\n" % errs)
                sys.exit(proc.returncode)
            else:
                print("#---- command ----:\n%s\n#---- end command ----\n\n"
                      "SUCCESS\n" % command)
        else:
            if args.dump is False:
                print("Command file: %s" % f_com_name)
    if args.dryrun is False:
        os.unlink(f_args_name)
    sys.exit(0)
from os import path
from jenkins import Jenkins
import getpass
import xml.etree.ElementTree as ET

# Interactive connection settings; the password is read without echo.
# FIX: the original dict literal was corrupted (credential scrubbing left
# unterminated call expressions); the obvious intended calls are restored.
jenkins = {
    'login': input('User: '),
    'password': getpass.getpass(prompt='Password: '),
    'path': input('Folder to backup: '),
    'server': input('Jenkins server: ')
}

server = Jenkins(jenkins['server'], username=jenkins['login'],
                 password=jenkins['password'])

# Back up the pipeline script of every 'api-team' job as a .groovy file.
for job in server.get_all_jobs():
    if 'api-team' in job['fullname']:
        script_name = path.join(jenkins['path'], job['fullname'] + '.groovy')
        conf = server.get_job_config(job['fullname'])
        try:
            root = ET.fromstring(conf)
            script_code = root.find("./definition/script").text
            print('Saving script: %s' % script_name)
            # 'with' guarantees the file is closed even on write errors.
            with open(script_name, 'w') as fl:
                fl.write(script_code)
        except Exception:
            # Narrowed from a bare 'except:' -- still best-effort per job,
            # but no longer swallows SystemExit/KeyboardInterrupt.
            print('can not parse %s' % job['name'])
class jenkinscls(object): def __init__(self): self.url = config.JENKINS_URL self.username = config.JENKINS_NAME self.token = config.JENKINS_TOKEN self.j = Jenkins(config.JENKINS_URL, username=config.JENKINS_NAME, password=config.JENKINS_TOKEN) def getjobnames(self, strval=''): rs = {r'...': r'/'} s = utils.multiple_replace(str(strval), rs).split('/') return s[0], "/".join(s[1:]) def getlenstr(self, strval, n): return str(strval)[0:n] def getstatus(self, strval): if str(strval) == 'FAILURE': return 'error' elif str(strval) == 'ABORTED': return 'aborted' elif str(strval) == 'SUCCESS': return 'success' else: return 'started' def edit_userjob_config(self, jn, obj): n, r = self.getjobnames(jn) try: desobj = callback() desobj.des = obj['description'] desobj.callback_url = "" desobj.build_id = '' desobj.duration = '' desobj.namespace = n desobj.image_name = obj['image_name'] desobj.repo_name = r desobj.status = '' desobj.tag = obj['build_config']['tag_configs']['docker_repo_tag'] desobj.time = '' ss = xmltodict.parse(self.getbasejob_config()) jsonpickle.set_preferred_backend('json') ss['project']['description'] = jsonpickle.encode(desobj) ss['project']['properties']['com.tikal.hudson.plugins.notification.HudsonNotificationProperty']['endpoints'] \ ['com.tikal.hudson.plugins.notification.Endpoint']['url'] = config.JOBHOOKURL ss['project']['scm']['userRemoteConfigs']['hudson.plugins.git.UserRemoteConfig'] \ ['url'] = obj['build_config']['code_repo_clone_url'] ss['project']['scm']['branches']['hudson.plugins.git.BranchSpec'] \ ['name'] = '*/' + obj['build_config']['tag_configs']['code_repo_type_value'] ss['project']['builders']['hudson.tasks.Shell'][ 'command'] = config.JOBCOMMON1 ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['registry'] \ ['url'] = config.REGISTRYURL b = str(obj['build_config']['tag_configs']['build_cache_enabled']) ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['noCache'] \ = ('true' if b == 'false' else 
'false') ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['dockerfilePath'] \ = obj['build_config']['tag_configs']['dockerfile_location'] ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['repoTag'] \ = obj['build_config']['tag_configs']['docker_repo_tag'] ss['project']['builders']['com.cloudbees.dockerpublish.DockerBuilder']['repoName'] \ = obj['image_name'] return xmltodict.unparse(ss) except Exception as e: print e.message def edit_docker_load_job_config(self, obj): try: # {docker_login} && docker import {httpfilename} {imagename} && docker push {imagename} ss = xmltodict.parse(self.getdocker_load_config()) desobj = callback() desobj.des = obj['export_file_url'] desobj.callback_url = obj['post_callback_url'] desobj.build_id = obj['build_id'] desobj.duration = '' desobj.namespace = "" desobj.repo_name = "" desobj.image_name = obj['image_name'] desobj.status = '' desobj.tag = obj['tag'] desobj.time = '' jsonpickle.set_preferred_backend('json') ss['project']['description'] = jsonpickle.encode(desobj) ss['project']['properties']['com.tikal.hudson.plugins.notification.HudsonNotificationProperty']['endpoints'] \ ['com.tikal.hudson.plugins.notification.Endpoint']['url'] = config.JOBHOOKURL tempstr = str( ss['project']['builders']['hudson.tasks.Shell']['command']) s = { r'{docker_login}': config.JOBCOMMON1, r'{httpfilename}': obj['export_file_url'], r'{imagename}': config.REGISTRYNAME + '/' + obj['image_name'] + ':' + obj['tag'] } ss['project']['builders']['hudson.tasks.Shell'][ 'command'] = utils.multiple_replace(tempstr, s) return xmltodict.unparse(ss) except Exception as e: print e.message def edit_docker_sync_job_config(self, obj): try: # {docker_login} && docker pull {oldimage} && docker tag {oldimage} {newimage} && docker push {newimage} ss = xmltodict.parse(self.getdocker_sync_config()) jsonUtil = JsonUtil() c = jsonUtil.parseJsonString(config.CLOUD_CONFIG) cid = obj['sync_cloud_id'] desobj = callback() desobj.des = "" 
desobj.callback_url = obj['post_callback_url'] desobj.build_id = '' desobj.duration = '' desobj.namespace = "" desobj.repo_name = obj['sync_cloud_id'] # 把cloudid 临时存在 这 desobj.image_name = obj['image_name'] desobj.status = '' desobj.tag = obj['tag'] desobj.time = '' jsonpickle.set_preferred_backend('json') ss['project']['description'] = jsonpickle.encode(desobj) ss['project']['properties']['com.tikal.hudson.plugins.notification.HudsonNotificationProperty']['endpoints'] \ ['com.tikal.hudson.plugins.notification.Endpoint']['url'] = config.JOBHOOKURL+'?cloudid='+obj['sync_cloud_id'] tempstr = str( ss['project']['builders']['hudson.tasks.Shell']['command']) s = { r'{docker_login}': c[cid]['login_common'], r'{oldimage}': config.REGISTRYNAME + '/' + obj['image_name'] + ':' + obj['tag'], r'{newimage}': c[cid]['registry_name'] + '/' + obj['image_name'] + ':' + obj['tag'] } ss['project']['builders']['hudson.tasks.Shell'][ 'command'] = utils.multiple_replace(tempstr, s) return xmltodict.unparse(ss) except Exception as e: print e.message def updateconfig_buildid(self, jn, imagename, build_id, callback_url): try: ss = xmltodict.parse(self.j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') desobj = jsonpickle.decode(ss['project']['description']) if str(desobj.build_id) == str(build_id): return True desobj.build_id = build_id desobj.callback_url = callback_url desobj.image_name = imagename ss['project']['description'] = jsonpickle.encode(desobj) self.j.reconfig_job(jn, xmltodict.unparse(ss)) return True except Exception as e: print e.message return False @gen.coroutine def posthook(self, obj): # s = {r'/': r'...'} jn = obj['name'] bid = str(obj['build']['number']) # n, r = self.getjobnames(jn) re = hook() try: info = self.j.get_build_info(jn, int(bid)) if self.j.job_exists(jn): ss = xmltodict.parse(self.j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') if isinstance(jsonpickle.decode(ss['project']['description']), callback): desobj = 
jsonpickle.decode(ss['project']['description']) re.namespace = desobj.namespace re.repo_name = desobj.repo_name re.build_id = str(obj['build']['number']) re.status = self.getstatus(obj['build']['status']) re.duration = info['duration'] re.tag = desobj.tag re.time = datetime.now() re.callurl = desobj.callback_url except Exception as e: print e.message re = None raise gen.Return(re) @gen.coroutine def post_docker_load_hook(self, obj): jn = obj['name'] bid = str(obj['build']['number']) re = postimage() try: # info = self.j.get_build_info(jn, int(bid)) if self.j.job_exists(jn): ss = xmltodict.parse(self.j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') if isinstance(jsonpickle.decode(ss['project']['description']), callback): desobj = jsonpickle.decode(ss['project']['description']) re.image_name = desobj.image_name re.status = self.getstatus(obj['build']['status']) re.tag = desobj.tag re.export_file_url = desobj.des re.time = datetime.now() re.build_id = desobj.build_id re.post_callback_url = desobj.callback_url if re.status != 'error' and config.JENKINS_IMAGEOPTJOB_DELETE == 'true': self.j.delete_job(jn) except Exception as e: print e.message re = None raise gen.Return(re) @gen.coroutine def post_docker_sync_hook(self, obj, cloudid): jn = obj['name'] # bid = str(obj['build']['number']) jsonUtil = JsonUtil() c = jsonUtil.parseJsonString(config.CLOUD_CONFIG) j = Jenkins(c[cloudid]['jenkins_url'], username=c[cloudid]['jenkins_name'], password=c[cloudid]['jenkins_token']) re = postimagesync() try: if j.job_exists(jn): ss = xmltodict.parse(j.get_job_config(jn)) jsonpickle.set_preferred_backend('json') if isinstance(jsonpickle.decode(ss['project']['description']), callback): desobj = jsonpickle.decode(ss['project']['description']) re.image_name = desobj.image_name re.status = self.getstatus(obj['build']['status']) re.sync_cloud_id = desobj.repo_name re.tag = desobj.tag re.time = datetime.now() re.post_callback_url = desobj.callback_url if re.status != 'error' 
and config.JENKINS_IMAGEOPTJOB_DELETE == 'true': j.delete_job(jn) except Exception as e: print e.message re = None raise gen.Return(re) @gen.coroutine def createjob(self, jobname, obj): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if self.j.job_exists(jn): re = createrespo(n, r, '工程已存在', 'error', datetime.now()) self.j.create_job(jn, self.edit_userjob_config(jn, obj)) re = createrespo(n, r, '', 'success', datetime.now()) except Exception as e: print e.message re = createrespo(n, r, '', 'error', datetime.now()) raise gen.Return(re) @gen.coroutine def create_docker_load_job(self, obj): # s = {r'/': r'...'} # jn = utils.multiple_replace(jobname, s) s = utils.randomstr(8) jn = '__docker_load_job_' + s re = postimage() re.created_at = datetime.now() re.image_name = obj['image_name'] re.build_id = str(obj['build_id']) re.post_callback_url = obj['post_callback_url'] re.tag = obj['tag'] re.status = 'started' try: if self.j.job_exists(jn): jn = jn + utils.randomstr(4) x = self.edit_docker_load_job_config(obj) self.j.create_job(jn, x) yield gen.sleep(0.5) self.j.build_job(jn) x = self.edit_docker_load_job_config(obj) self.j.create_job(jn, x) yield gen.sleep(0.5) self.j.build_job(jn) except Exception as e: print e.message re.status = 'error' raise gen.Return(re) @gen.coroutine def create_docker_sync_job(self, obj): # s = {r'/': r'...'} # jn = utils.multiple_replace(jobname, s) s = utils.randomstr(8) jn = '__docker_sync_job_' + s cid = obj['sync_cloud_id'] jsonUtil = JsonUtil() c = jsonUtil.parseJsonString(config.CLOUD_CONFIG) j = Jenkins(c[cid]['jenkins_url'], username=c[cid]['jenkins_name'], password=c[cid]['jenkins_token']) re = postimagesync() re.time = datetime.now() re.sync_cloud_id = obj['sync_cloud_id'] re.image_name = obj['image_name'] re.post_callback_url = obj['post_callback_url'] re.tag = obj['tag'] re.status = 'started' try: if j.job_exists(jn): jn = jn + utils.randomstr(4) j.create_job(jn, 
self.edit_docker_sync_job_config(obj)) yield gen.sleep(0.5) j.build_job(jn) j.create_job(jn, self.edit_docker_sync_job_config(obj)) yield gen.sleep(0.5) j.build_job(jn) except Exception as e: print e.message re.status = 'error' raise gen.Return(re) @gen.coroutine def editjob(self, jobname, obj): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if self.j.job_exists(jn): self.j.reconfig_job(jn, self.edit_userjob_config(jn, obj)) re = createrespo(n, r, '', 'success', datetime.now()) else: re = createrespo(n, r, 'repo is not find', 'error', datetime.now()) except Exception as e: print e.message re = createrespo(n, r, '', 'error', datetime.now()) raise gen.Return(re) @gen.coroutine def getjobinfo(self, jobname): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) re = jobinfo() try: if self.j.job_exists(jn): re.namespace = n re.repo_name = r re.info = self.j.get_job_info(jn) except Exception as e: print e.message re.namespace = n re.repo_name = r re.info = "" raise gen.Return(re) @gen.coroutine def deljob(self, jobname): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if self.j.job_exists(jn): self.j.delete_job(jn) re = delrespo(n, r, 'success') except Exception as e: print e.message re = delrespo(n, r, 'error') raise gen.Return(re) @gen.coroutine def stopbuild(self, jobname, build_id): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if self.j.job_exists(jn) and self.j.get_build_info( jn, int(build_id)): self.j.stop_build(jn, int(build_id)) re = delbuild(n, r, build_id, 'aborted') else: re = delbuild(n, r, build_id, 'error') except Exception as e: print e.message re = delbuild(n, r, build_id, 'error') raise gen.Return(re) @gen.coroutine def postbuild(self, jobname, imagename, tag, callback_url): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: if 
self.j.job_exists(jn): j = self.j.get_job_info(jn) build_id = j['nextBuildNumber'] if self.j.get_queue_info() != []: re = postbuild(n, r, imagename, build_id, tag, datetime.now(), 'queue') elif j['queueItem'] != None: re = postbuild(n, r, imagename, build_id, tag, datetime.now(), 'queue') else: self.updateconfig_buildid(jn, imagename, build_id, callback_url) self.j.build_job(jn) re = postbuild(n, r, imagename, build_id, tag, datetime.now(), 'started') else: re = postbuild(n, r, '', '', datetime.now(), 'error') except Exception as e: print e.message re = postbuild(n, r, '', tag, datetime.now(), 'error') raise gen.Return(re) @gen.coroutine def getbuild(self, jobname, build_id): s = {r'/': r'...'} jn = utils.multiple_replace(jobname, s) n, r = self.getjobnames(jn) try: b = self.j.get_build_info(jn, int(build_id)) building = b['building'] duration = b['duration'] dt = self.getlenstr(b['timestamp'], 10) started_at = utils.timestamp_datetime(int(dt)) status = self.getstatus(b['result']) stdout = self.j.get_build_console_output(jn, int(build_id)) bd = build_detail(n, r, build_id, building, started_at, duration, status, stdout) except Exception as e: print e.message bd = build_detail(n, r, build_id, '', '', '', 'error', '') raise gen.Return(bd) def getdocker_sync_config(self): s = '''<?xml version='1.0' encoding='UTF-8'?> <project> <actions/> <description></description> <keepDependencies>false</keepDependencies> <properties> <com.tikal.hudson.plugins.notification.HudsonNotificationProperty plugin="[email protected]"> <endpoints> <com.tikal.hudson.plugins.notification.Endpoint> <protocol>HTTP</protocol> <format>JSON</format> <url>http://10.1.39.60:8080/v1/hook</url> <event>completed</event> <timeout>30000</timeout> <loglines>0</loglines> </com.tikal.hudson.plugins.notification.Endpoint> </endpoints> </com.tikal.hudson.plugins.notification.HudsonNotificationProperty> <com.synopsys.arc.jenkins.plugins.ownership.jobs.JobOwnerJobProperty plugin="[email protected]"/> 
<hudson.plugins.heavy__job.HeavyJobProperty plugin="[email protected]"> <weight>1</weight> </hudson.plugins.heavy__job.HeavyJobProperty> <jenkins.model.BuildDiscarderProperty> <strategy class="hudson.tasks.LogRotator"> <daysToKeep>30</daysToKeep> <numToKeep>50</numToKeep> <artifactDaysToKeep>-1</artifactDaysToKeep> <artifactNumToKeep>-1</artifactNumToKeep> </strategy> </jenkins.model.BuildDiscarderProperty> <job-metadata plugin="[email protected]"> <values class="linked-list"> <metadata-tree> <name>job-info</name> <parent class="job-metadata" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-tree> <name>last-saved</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-date> <name>time</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value> <time>1458098001639</time> <timezone>Asia/Shanghai</timezone> </value> <checked>false</checked> </metadata-date> <metadata-tree> <name>user</name> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-string> <name>display-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> <metadata-string> <name>full-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> </children> </metadata-tree> </children> </metadata-tree> </children> </metadata-tree> 
</values> </job-metadata> </properties> <scm class="hudson.scm.NullSCM"/> <scmCheckoutRetryCount>3</scmCheckoutRetryCount> <canRoam>true</canRoam> <disabled>false</disabled> <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding> <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding> <triggers/> <concurrentBuild>false</concurrentBuild> <builders> <hudson.tasks.Shell> <command>{docker_login} && docker pull {oldimage} && docker tag --force=true {oldimage} {newimage} && docker push {newimage} </command> </hudson.tasks.Shell> </builders> <publishers/> <buildWrappers> <hudson.plugins.ansicolor.AnsiColorBuildWrapper plugin="[email protected]"> <colorMapName>xterm</colorMapName> </hudson.plugins.ansicolor.AnsiColorBuildWrapper> </buildWrappers> </project>''' return s def getdocker_load_config(self): s = '''<?xml version='1.0' encoding='UTF-8'?> <project> <actions/> <description></description> <keepDependencies>false</keepDependencies> <properties> <com.tikal.hudson.plugins.notification.HudsonNotificationProperty plugin="[email protected]"> <endpoints> <com.tikal.hudson.plugins.notification.Endpoint> <protocol>HTTP</protocol> <format>JSON</format> <url>http://10.1.39.60:8080/v1/hook</url> <event>completed</event> <timeout>30000</timeout> <loglines>0</loglines> </com.tikal.hudson.plugins.notification.Endpoint> </endpoints> </com.tikal.hudson.plugins.notification.HudsonNotificationProperty> <com.synopsys.arc.jenkins.plugins.ownership.jobs.JobOwnerJobProperty plugin="[email protected]"/> <hudson.plugins.heavy__job.HeavyJobProperty plugin="[email protected]"> <weight>1</weight> </hudson.plugins.heavy__job.HeavyJobProperty> <jenkins.model.BuildDiscarderProperty> <strategy class="hudson.tasks.LogRotator"> <daysToKeep>30</daysToKeep> <numToKeep>50</numToKeep> <artifactDaysToKeep>-1</artifactDaysToKeep> <artifactNumToKeep>-1</artifactNumToKeep> </strategy> </jenkins.model.BuildDiscarderProperty> <job-metadata plugin="[email protected]"> <values 
class="linked-list"> <metadata-tree> <name>job-info</name> <parent class="job-metadata" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-tree> <name>last-saved</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-date> <name>time</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value> <time>1458097635464</time> <timezone>Asia/Shanghai</timezone> </value> <checked>false</checked> </metadata-date> <metadata-tree> <name>user</name> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-string> <name>display-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> <metadata-string> <name>full-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> </children> </metadata-tree> </children> </metadata-tree> </children> </metadata-tree> </values> </job-metadata> </properties> <scm class="hudson.scm.NullSCM"/> <scmCheckoutRetryCount>3</scmCheckoutRetryCount> <canRoam>true</canRoam> <disabled>false</disabled> <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding> <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding> <triggers/> <concurrentBuild>false</concurrentBuild> <builders> <hudson.tasks.Shell> <command>{docker_login} && docker import {httpfilename} 
{imagename} && docker push {imagename} </command> </hudson.tasks.Shell> </builders> <publishers/> <buildWrappers> <hudson.plugins.ansicolor.AnsiColorBuildWrapper plugin="[email protected]"> <colorMapName>xterm</colorMapName> </hudson.plugins.ansicolor.AnsiColorBuildWrapper> </buildWrappers> </project>''' return s def getbasejob_config(self): s = '''<?xml version='1.0' encoding='UTF-8'?> <project> <actions/> <description></description> <keepDependencies>false</keepDependencies> <properties> <com.tikal.hudson.plugins.notification.HudsonNotificationProperty plugin="[email protected]"> <endpoints> <com.tikal.hudson.plugins.notification.Endpoint> <protocol>HTTP</protocol> <format>JSON</format> <url>http://10.1.39.60:8080/v1/hook</url> <event>completed</event> <timeout>30000</timeout> <loglines>0</loglines> </com.tikal.hudson.plugins.notification.Endpoint> </endpoints> </com.tikal.hudson.plugins.notification.HudsonNotificationProperty> <com.synopsys.arc.jenkins.plugins.ownership.jobs.JobOwnerJobProperty plugin="[email protected]"/> <hudson.plugins.heavy__job.HeavyJobProperty plugin="[email protected]"> <weight>1</weight> </hudson.plugins.heavy__job.HeavyJobProperty> <jenkins.model.BuildDiscarderProperty> <strategy class="hudson.tasks.LogRotator"> <daysToKeep>30</daysToKeep> <numToKeep>50</numToKeep> <artifactDaysToKeep>-1</artifactDaysToKeep> <artifactNumToKeep>-1</artifactNumToKeep> </strategy> </jenkins.model.BuildDiscarderProperty> <job-metadata plugin="[email protected]"> <values class="linked-list"> <metadata-tree> <name>job-info</name> <parent class="job-metadata" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-tree> <name>last-saved</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-date> <name>time</name> 
<description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value> <time>1457958794480</time> <timezone>Asia/Shanghai</timezone> </value> <checked>false</checked> </metadata-date> <metadata-tree> <name>user</name> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <children class="linked-list"> <metadata-string> <name>display-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> <metadata-string> <name>full-name</name> <description></description> <parent class="metadata-tree" reference="../../.."/> <generated>true</generated> <exposedToEnvironment>false</exposedToEnvironment> <value>admin</value> </metadata-string> </children> </metadata-tree> </children> </metadata-tree> </children> </metadata-tree> </values> </job-metadata> </properties> <scm class="hudson.plugins.git.GitSCM" plugin="[email protected]"> <configVersion>2</configVersion> <userRemoteConfigs> <hudson.plugins.git.UserRemoteConfig> <url>https://github.com/zhwenh/dockerfile-jdk-tomcat.git</url> </hudson.plugins.git.UserRemoteConfig> </userRemoteConfigs> <branches> <hudson.plugins.git.BranchSpec> <name>*/master</name> </hudson.plugins.git.BranchSpec> </branches> <doGenerateSubmoduleConfigurations>false</doGenerateSubmoduleConfigurations> <browser class="hudson.plugins.git.browser.GitLab"> <url></url> <version>7.11</version> </browser> <submoduleCfg class="list"/> <extensions/> </scm> <scmCheckoutRetryCount>3</scmCheckoutRetryCount> <canRoam>true</canRoam> <disabled>false</disabled> <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding> <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding> <triggers/> 
<concurrentBuild>false</concurrentBuild> <builders> <hudson.tasks.Shell> <command>docker login -u admin -p admin123 -e [email protected] registry.test.com</command> </hudson.tasks.Shell> <com.cloudbees.dockerpublish.DockerBuilder plugin="[email protected]"> <server plugin="[email protected]"> <uri>unix:///var/run/docker.sock</uri> </server> <registry plugin="[email protected]"> <url>http://registry.test.com/v2</url> </registry> <repoName>zhwenh/tomcat</repoName> <noCache>false</noCache> <forcePull>true</forcePull> <dockerfilePath>./Dockerfile</dockerfilePath> <skipBuild>false</skipBuild> <skipDecorate>false</skipDecorate> <repoTag>2.3.1</repoTag> <skipPush>false</skipPush> <createFingerprint>true</createFingerprint> <skipTagLatest>false</skipTagLatest> <buildAdditionalArgs></buildAdditionalArgs> <forceTag>true</forceTag> </com.cloudbees.dockerpublish.DockerBuilder> </builders> <publishers> <hudson.plugins.emailext.ExtendedEmailPublisher plugin="[email protected]"> <recipientList>$DEFAULT_RECIPIENTS</recipientList> <configuredTriggers> <hudson.plugins.emailext.plugins.trigger.FailureTrigger> <email> <recipientList>$DEFAULT_RECIPIENTS</recipientList> <subject>$PROJECT_DEFAULT_SUBJECT</subject> <body>$PROJECT_DEFAULT_CONTENT</body> <recipientProviders> <hudson.plugins.emailext.plugins.recipients.DevelopersRecipientProvider/> </recipientProviders> <attachmentsPattern></attachmentsPattern> <attachBuildLog>false</attachBuildLog> <compressBuildLog>false</compressBuildLog> <replyTo>$PROJECT_DEFAULT_REPLYTO</replyTo> <contentType>project</contentType> </email> </hudson.plugins.emailext.plugins.trigger.FailureTrigger> <hudson.plugins.emailext.plugins.trigger.SuccessTrigger> <email> <recipientList>$DEFAULT_RECIPIENTS</recipientList> <subject>$PROJECT_DEFAULT_SUBJECT</subject> <body>$PROJECT_DEFAULT_CONTENT</body> <recipientProviders> <hudson.plugins.emailext.plugins.recipients.DevelopersRecipientProvider/> </recipientProviders> <attachmentsPattern></attachmentsPattern> 
<attachBuildLog>false</attachBuildLog> <compressBuildLog>false</compressBuildLog> <replyTo>$PROJECT_DEFAULT_REPLYTO</replyTo> <contentType>project</contentType> </email> </hudson.plugins.emailext.plugins.trigger.SuccessTrigger> </configuredTriggers> <contentType>default</contentType> <defaultSubject>$DEFAULT_SUBJECT</defaultSubject> <defaultContent>$DEFAULT_CONTENT</defaultContent> <attachmentsPattern></attachmentsPattern> <presendScript>$DEFAULT_PRESEND_SCRIPT</presendScript> <attachBuildLog>false</attachBuildLog> <compressBuildLog>false</compressBuildLog> <replyTo></replyTo> <saveOutput>false</saveOutput> <disabled>false</disabled> </hudson.plugins.emailext.ExtendedEmailPublisher> </publishers> <buildWrappers> <hudson.plugins.ansicolor.AnsiColorBuildWrapper plugin="[email protected]"> <colorMapName>xterm</colorMapName> </hudson.plugins.ansicolor.AnsiColorBuildWrapper> </buildWrappers> </project>''' return s @gen.coroutine def createbasejob(self): s = self.getbasejob_config() try: self.j.create_job(config.JENKINS_BASEJOB, s) except Exception as e: print e.message raise gen.Return(False) raise gen.Return(True)