def job_20_test_all(self, cr, uid, build, lock_path, log_path):
    """Create and run the docker container for the build's test job.

    Falls back to the standard runbot job when the repo is not a
    travis2docker build. Skips the job (returning the magic PID) when
    the build has no docker image/dockerfile or was marked 'skipped'.

    :return: PID of the spawned docker process, or
             MAGIC_PID_RUN_NEXT_JOB when the job is skipped.
    """
    if not build.branch_id.repo_id.is_travis2docker_build:
        return super(RunbotBuild, self).job_20_test_all(
            cr, uid, build, lock_path, log_path)
    if not build.docker_image or not build.dockerfile_path \
            or build.result == 'skipped':
        _logger.info('docker build skipping job_20_test_all')
        return MAGIC_PID_RUN_NEXT_JOB
    # Remove any stale container before starting a fresh one.
    # '-v' also drops its anonymous volumes (consistent with
    # docker_rm_container) so repeated builds don't leak disk space.
    run(['docker', 'rm', '-vf', build.docker_container])
    cmd = [
        'docker', 'run',
        '-e', 'INSTANCE_ALIVE=1',
        '-e', 'RUNBOT=1',
        '-e', 'UNBUFFER=1',
        '-p', '%d:%d' % (build.port, 8069),
        '--name=' + build.docker_container,
        '-t', build.docker_image,
    ]
    return self.spawn(cmd, lock_path, log_path)
def pg_createdb(self, cr, uid, dbname):
    """Recreate ``dbname`` from scratch.

    Any existing database with that name is dropped first; the new one
    uses unicode encoding, C collation and the ``template0`` template
    so its contents are reproducible across hosts.
    """
    self.pg_dropdb(cr, uid, dbname)
    _logger.debug("createdb %s", dbname)
    createdb_cmd = [
        'createdb',
        '--encoding=unicode',
        '--lc-collate=C',
        '--template=template0',
        dbname,
    ]
    if config['db_user']:
        createdb_cmd.extend(['--username', config['db_user']])
    run(createdb_cmd)
def schedule(self, cr, uid, ids, context=None):
    """Run standard scheduling, then post-configure docker builds.

    After the parent scheduler runs, each running travis2docker build
    that just passed its tests gets a one-time setup: sshd is started
    inside its container, authorized SSH keys are appended, and the
    build's HTTP port is warmed up from its own host.
    """
    res = super(RunbotBuild, self).schedule(cr, uid, ids, context=context)
    current_host = fqdn()
    for build in self.browse(cr, uid, ids, context=context):
        # Act only once per build (docker_executed_commands flag), and
        # only on running, successful travis2docker builds.
        if not all([build.state == 'running',
                    build.job == 'job_30_run',
                    build.result in ['ok', 'warn'],
                    not build.docker_executed_commands,
                    build.repo_id.is_travis2docker_build]):
            continue
        # NOTE(review): fixed 20s delay — presumably to let the
        # container finish starting before exec'ing into it; confirm.
        time.sleep(20)
        # Mark first so the block is not re-entered on the next tick.
        build.write({'docker_executed_commands': True})
        # Start sshd inside the container as root (detached).
        run(['docker', 'exec', '-d', '--user', 'root',
             build.docker_container, '/etc/init.d/ssh', 'start'])
        # Collect SSH keys: build-specific ones plus an optional
        # host-level extras file.
        ssh_keys = self.get_ssh_keys(cr, uid, build, context=context) or ''
        f_extra_keys = os.path.expanduser('~/.ssh/runbot_authorized_keys')
        if os.path.isfile(f_extra_keys):
            with open(f_extra_keys) as fobj_extra_keys:
                ssh_keys += "\n" + fobj_extra_keys.read()
        ssh_keys = ssh_keys.strip(" \n")
        if ssh_keys:
            # NOTE(review): keys are interpolated into a single-quoted
            # shell string — a quote inside a key would break/escape
            # the command; assumes keys are trusted. Confirm.
            run(['docker', 'exec', '-d', '--user', 'odoo',
                 build.docker_container, "bash", "-c",
                 "echo '%(keys)s' | tee -a '%(dir)s'" % dict(
                     keys=ssh_keys,
                     dir="/home/odoo/.ssh/authorized_keys")])
        # Warm up the instance only from the host actually running it.
        if current_host == build.host:
            urlopen_t = threading.Thread(target=RunbotBuild._open_url,
                                         args=(build.port,))
            urlopen_t.start()
    return res
def pg_createdb(self, cr, uid, dbname):
    """Drop ``dbname`` if present, then create it with fixed settings.

    Unicode encoding, C collation and template0 keep the database
    layout deterministic regardless of the host's defaults.
    """
    self.pg_dropdb(cr, uid, dbname)
    _logger.debug("createdb %s", dbname)
    args = ['createdb', '--encoding=unicode', '--lc-collate=C',
            '--template=template0', dbname]
    if config['db_user']:
        args = args + ['--username', config['db_user']]
    run(args)
def job_20_test_all(self, cr, uid, build, lock_path, log_path):
    """Create and run the docker container for the build's test job.

    Falls back to the standard runbot job for non-travis2docker repos.
    Skips the job (returning the magic PID) when the build has no
    docker image/dockerfile or was marked 'skipped'. Exports the
    Travis-compatible environment (branch, commit, PR info) into the
    container and points --log-db at the runbot database.

    :return: PID of the spawned docker process, or
             MAGIC_PID_RUN_NEXT_JOB when the job is skipped.
    """
    if not build.branch_id.repo_id.is_travis2docker_build:
        return super(RunbotBuild, self).job_20_test_all(
            cr, uid, build, lock_path, log_path)
    if not build.docker_image or not build.dockerfile_path \
            or build.result == 'skipped':
        _logger.info('docker build skipping job_20_test_all')
        return MAGIC_PID_RUN_NEXT_JOB
    # Remove any stale container (and its anonymous volumes).
    run(['docker', 'rm', '-vf', build.docker_container])
    # Export PR metadata only for pull-request refs.
    if 'refs/pull/' in build.branch_id.name:
        pr_cmd_env = [
            '-e', 'TRAVIS_PULL_REQUEST=' + build.branch_id.branch_name,
            '-e', 'CI_PULL_REQUEST=' + build.branch_id.branch_name,
        ]
    else:
        pr_cmd_env = ['-e', 'TRAVIS_PULL_REQUEST=false']
    travis_branch = build._get_closest_branch_name(
        build.repo_id.id)[1].split('/')[-1]
    cmd = [
        'docker', 'run',
        '-e', 'INSTANCE_ALIVE=1',
        '-e', 'TRAVIS_BRANCH=' + travis_branch,
        '-e', 'TRAVIS_COMMIT=' + build.name,
        '-e', 'RUNBOT=1',
        '-e', 'UNBUFFER=0',
        '-e', 'START_SSH=1',
        '-e', 'TEST_ENABLE=%d' % (
            not build.repo_id.travis2docker_test_disable),
        '-p', '%d:%d' % (build.port, 8069),
        '-p', '%d:%d' % (build.port + 1, 22),
        '--name=' + build.docker_container,
        '-t', build.docker_image,
    ] + pr_cmd_env
    logdb = cr.dbname
    if config['db_host'] and not travis_branch.startswith('7.0'):
        # BUGFIX: previously written as "'postgres://…@' +
        # '…'.format(...)" — the method call binds tighter than '+',
        # so .format() was applied only to the second literal and the
        # '{cfg[db_user]}:{cfg[db_password]}@' part was emitted
        # unformatted. Implicit string concatenation formats the whole
        # URI.
        logdb = ('postgres://{cfg[db_user]}:{cfg[db_password]}@'
                 '{cfg[db_host]}/{db}'.format(cfg=config, db=cr.dbname))
    cmd += ['-e', 'SERVER_OPTIONS="--log-db=%s"' % logdb]
    return self.spawn(cmd, lock_path, log_path)
def reload_nginx(self, cr, uid, context=None):
    """Regenerate the runbot nginx config and reload nginx.

    Completely overrides the parent method. Renders the
    ``runbot.nginx_config`` view with the running builds of all repos
    that have nginx enabled, writes it to ``<root>/nginx/nginx.conf``
    and reloads nginx via systemctl.
    """
    settings = {}
    settings['port'] = config['xmlrpc_port']
    nginx_dir = os.path.join(self.root(cr, uid), 'nginx')
    settings['nginx_dir'] = nginx_dir
    ids = self.search(cr, uid, [('nginx', '=', True)], order='id')
    if ids:
        build_ids = self.pool['runbot.build'].search(
            cr, uid, [('repo_id', 'in', ids), ('state', '=', 'running')])
        settings['builds'] = self.pool['runbot.build'].browse(
            cr, uid, build_ids)
        nginx_config = self.pool['ir.ui.view'].render(
            cr, uid, "runbot.nginx_config", settings)
        mkdirs([nginx_dir])
        # Context manager guarantees the config is flushed and closed
        # before nginx re-reads it (the original leaked the handle via
        # open(...).write(...)).
        with open(os.path.join(nginx_dir, 'nginx.conf'), 'w') as fobj:
            fobj.write(nginx_config)
        _logger.debug('reload nginx')
        run(['sudo', '/bin/systemctl', 'reload', 'nginx'])
def reload_nginx(self, cr, uid, context=None):
    """Regenerate the runbot nginx config and reload nginx.

    Completely overrides the parent method. Renders the
    ``runbot.nginx_config`` view with the running builds of all repos
    that have nginx enabled, writes it to ``<root>/nginx/nginx.conf``
    and reloads nginx via the ``service`` command.
    """
    settings = {}
    settings['port'] = config['xmlrpc_port']
    nginx_dir = os.path.join(self.root(cr, uid), 'nginx')
    settings['nginx_dir'] = nginx_dir
    ids = self.search(cr, uid, [('nginx', '=', True)], order='id')
    if ids:
        build_ids = self.pool['runbot.build'].search(
            cr, uid, [('repo_id', 'in', ids), ('state', '=', 'running')])
        settings['builds'] = self.pool['runbot.build'].browse(
            cr, uid, build_ids)
        nginx_config = self.pool['ir.ui.view'].render(
            cr, uid, "runbot.nginx_config", settings)
        mkdirs([nginx_dir])
        # Context manager guarantees the config is flushed and closed
        # before nginx re-reads it (the original leaked the handle via
        # open(...).write(...)).
        with open(os.path.join(nginx_dir, 'nginx.conf'), 'w') as fobj:
            fobj.write(nginx_config)
        _logger.debug('reload nginx')
        run(['sudo', '/usr/sbin/service', 'nginx', 'reload'])
def create_image_cache(self):
    """Commit each build's container as a cached docker image.

    Only successful (ok/warn), non-pull-request builds of repos with
    the docker cache enabled are committed. When a registry server is
    configured for the repo, the cached image is also pushed there.
    """
    for build in self:
        if not build.is_pull_request and build.result in ['ok', 'warn'] \
                and build.repo_id.use_docker_cache:
            image_cached = build.get_docker_image(build.branch_closest)
            cmd = [
                'docker', 'commit', '-m', 'runbot_cache',
                build.docker_container, image_cached,
            ]
            _logger.info('Generating image cache: ' + ' '.join(cmd))
            run(cmd)
            if build.repo_id.docker_registry_server:
                cmd = ['docker', 'push', image_cached]
                _logger.info('Pushing image: ' + ' '.join(cmd))
                # Method `run` show `error interrupted system call` in CI,
                # so the push is driven through Popen directly.
                sp = subprocess.Popen(
                    cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                # NOTE(review): the '' sentinel assumes text-mode (py2)
                # stdout; with py3 bytes pipes readline never returns ''
                # and this loop would not terminate — confirm runtime.
                for line in iter(sp.stdout.readline, ''):
                    # Add info log to avoid a `without ouput` error in CI
                    _logger.info(line.strip('\n\r '))
                # stderr is drained only after stdout is exhausted.
                err = sp.stderr.read()
                if err:
                    _logger.error(err)
def update_lang(self):
    """Set lang to all users into '-all' database.

    Imports the odoo ``release`` module from the build's server path to
    pick the right table (pre-7.0 stored lang on res_users, later
    versions on res_partner), then updates it via psql.

    :return: True (always).
    """
    if self.lang:
        db_name = "%s-all" % self.dest
        # All odoo versions has openerp/release.py file
        sys.path.insert(0, self.server("openerp"))
        try:
            # NOTE(review): __import__ returns any already-cached
            # 'release' module — verify this always resolves to this
            # build's release.py and not a previously imported one.
            release = __import__("release")
        finally:
            # Restore sys.path even if the import fails.
            sys.path.pop(0)
        if release.version_info < (7, 0):
            # Old version used `res_users` table to set `lang`
            # NOTE(review): self.lang is interpolated directly into the
            # SQL text — safe only if lang comes from a trusted
            # selection field, not free user input; confirm.
            run(['psql', db_name, '-c',
                 "UPDATE res_users SET lang='{lang}';".format(
                     lang=self.lang)])
        else:
            # New version use `res_partner` table to set `lang`
            run(['psql', db_name, '-c',
                 "UPDATE res_partner SET lang='{lang}' ".format(
                     lang=self.lang) +
                 "WHERE id IN (SELECT partner_id FROM res_users);"])
    return True
def use_build_cache(self):
    """Check if a build is candidate to use cache.

    * Change in .travis.yml then don't use cache.
    * The image base don't exists then don't use cache.
    * The repo has use_docker_cache==False then don't use cache.
    """
    self.ensure_one()
    repo = self.repo_id
    # Repo-level switch: caching disabled entirely.
    if not repo.use_docker_cache:
        return False
    # Any change to .travis.yml since the closest branch invalidates
    # the cached image.
    travis_yml_diff = repo.git([
        'diff', '--name-only',
        self.branch_closest + '..' + self.name,
        '--', '.travis.yml'])
    if travis_yml_diff:
        return False
    # Optionally pull the cache image from the configured registry so
    # the local-existence check below can succeed.
    if repo.docker_registry_server:
        pull_cmd = ["docker", "pull", self.docker_image_cache]
        _logger.info("Pulling image cache: %s", ' '.join(pull_cmd))
        run(pull_cmd)
    # The cache image must exist locally to be usable.
    if self.docker_image_cache not in self.get_docker_images():
        _logger.warning(
            "Image cache '%s' don't exists for build %d with branch %s.",
            self.docker_image_cache, self.sequence, self.branch_id.name)
        return False
    return True
def job_21_coverage(self, build, lock_path, log_path):
    """Run the repo's coverage HTML report and grade the build on it.

    Skipped when the repo has coverage disabled. Marks the build 'ko'
    when the coverage command fails, then parses the generated
    index.html to store the coverage percentage, and marks the build
    'ko' again if coverage dropped versus the reference build of the
    base version branch.
    """
    if not build.repo_id.use_coverage:
        return
    output = build.path('logs/job_21_coverage')
    mkdirs([output])
    result = None
    with build._chdir():
        result = run(
            build.repo_id._coverage_command('html', '--directory', output,
                                            '--title', build.name))
    # NOTE(review): a truthy return from `run` is treated as failure —
    # presumably the command's exit status; confirm.
    if result:
        build.write({
            'result': 'ko',
        })
        build.github_status()
    output = os.path.join(output, 'index.html')
    if os.path.exists(output):
        doc = etree.fromstring(open(output).read(), etree.HTMLParser())
        coverage = 0.0
        # The 'total' row's data-ratio attribute holds
        # "covered_lines all_lines"; the last matching node wins.
        for node in doc.xpath("//tr[@class='total']/td[@data-ratio]"):
            covered_lines, all_lines = node.get('data-ratio').split()
            coverage = float(covered_lines or 0) / float(all_lines or 1)
            coverage *= 100
        build.write({
            'coverage': coverage,
        })
        # Reference build: latest build of the base version branch
        # (e.g. '11.0' extracted from '11.0-feature').
        version = (build.branch_id.branch_name or '').split('-')[0]
        target_build = self.env['runbot.build'].search(
            [('id', 'not in', build.ids),
             ('repo_id', 'in', build.repo_id.ids),
             ('branch_id.branch_name', '=', version)], limit=1)
        if target_build.coverage and target_build.coverage > coverage:
            build._log(
                'coverage', 'coverage dropped from %.2f in %s to %.2f' % (
                    target_build.coverage,
                    target_build.branch_id.branch_name, coverage))
            build.write({
                'result': 'ko',
            })
            build.github_status()
    return result
def _local_cleanup(self, cr, uid, ids, context=None):
    """Remove the docker container and image tied to each build.

    Frees local docker resources once the builds are no longer needed.
    """
    for build in self.browse(cr, uid, ids, context=context):
        if build.docker_container:
            # '-v' also removes the container's anonymous volumes,
            # matching docker_rm_container, so cleanup doesn't leak
            # disk space.
            run(['docker', 'rm', '-vf', build.docker_container])
            run(['docker', 'rmi', '-f', build.docker_image])
def docker_rm_image(self):
    """Force-remove the docker image of every build in the recordset."""
    for record in self:
        rmi_cmd = ['docker', 'rmi', '-f', record.docker_image]
        run(rmi_cmd)
def docker_rm_container(self):
    """Force-remove each build's container along with its volumes."""
    for record in self:
        rm_cmd = ['docker', 'rm', '-vf', record.docker_container]
        run(rm_cmd)