def check_file_exist(self, data, base_path="/repo"):
    """Validate that every file referenced by a job definition exists.

    Args:
        data: parsed job definition; its optional 'jobs' key is a list of
            dicts, each with a 'type' and a type-specific file key
            ('docker_file', 'docker_compose_file' or 'infrabox_file').
        base_path: directory the referenced paths are resolved against.

    Raises:
        Failure: if a referenced file does not exist, or a docker-compose
            file exists but fails to parse in create_from().
    """
    for job in data.get('jobs', []):
        job_type = job['type']
        # A job has exactly one type, so chain with elif instead of
        # re-testing every branch for every job.
        if job_type == "docker":
            p = os.path.join(base_path, job['docker_file'])
            if not os.path.exists(p):
                raise Failure("%s does not exist" % p)
        elif job_type == "docker-compose":
            p = os.path.join(base_path, job['docker_compose_file'])
            if not os.path.exists(p):
                raise Failure("%s does not exist" % p)
            # The file exists; also make sure it parses.
            try:
                create_from(p)
            except Exception as e:
                # str(e) instead of e.message: the .message attribute was
                # removed in Python 3 and is unset for many exception types.
                raise Failure("%s: %s" % (p, str(e)))
        elif job_type == "workflow":
            p = os.path.join(base_path, job['infrabox_file'])
            if not os.path.exists(p):
                raise Failure("%s does not exist" % p)
def run_exception(path, message):
    """Parse *path* (relative to this test file's directory) and verify
    that create_from() raises an Exception carrying exactly *message*."""
    here = os.path.dirname(os.path.realpath(__file__))
    full_path = os.path.join(here, path)
    with assert_raises(Exception) as caught:
        create_from(full_path)
    eq_(caught.exception.message, message)
def check_file_exist(self, data, infrabox_context):
    """Validate that every file referenced by a job definition exists.

    Args:
        data: parsed job definition; its optional 'jobs' key is a list of
            dicts, each with a 'type' and a type-specific file key.
        infrabox_context: directory the referenced paths are resolved
            against.

    Raises:
        Failure: if a referenced file does not exist (a missing
            docker-compose file is tolerated, since it may be generated
            dynamically), or a docker-compose file fails to parse.
    """
    for job in data.get('jobs', []):
        job_type = job['type']
        # A job has exactly one type, so chain with elif instead of
        # re-testing every branch for every job.
        if job_type == "docker":
            job_build_context = job.get('build_context', None)
            build_context = self._get_build_context_impl(
                job_build_context, infrabox_context)
            dockerfile = os.path.normpath(
                os.path.join(build_context, job['docker_file']))
            p = os.path.join(infrabox_context, dockerfile)
            if not os.path.exists(p):
                raise Failure("%s does not exist" % p)
        elif job_type == "docker-compose":
            composefile = job['docker_compose_file']
            p = os.path.join(infrabox_context, composefile)
            if not os.path.exists(p):
                # The compose file might be generated dynamically by an
                # earlier job. 'continue' (not 'return', as before) so the
                # remaining jobs in the list are still validated.
                continue
            # validate it
            try:
                create_from(p)
            except Exception as e:
                # str(e) instead of e.message: the .message attribute was
                # removed in Python 3 and is unset for many exception types.
                raise Failure("%s: %s" % (p, str(e)))
        elif job_type == "workflow":
            workflowfile = job['infrabox_file']
            p = os.path.join(infrabox_context, workflowfile)
            if not os.path.exists(p):
                raise Failure("%s does not exist" % p)
def run_exception(self, path, message):
    """Assert that create_from() on *path* (relative to this test file's
    directory) raises an Exception whose message equals *message*.

    Uses assertRaises instead of the previous try/assert-False/except
    pattern: there, the sentinel ``assert False`` (meant to flag a missing
    exception) was itself caught by the surrounding ``except Exception``,
    turning "no exception raised" into a confusing message mismatch. It
    also used the Python-2-only ``print e`` statement and ``e.message``.
    """
    path = os.path.join(os.path.dirname(os.path.realpath(__file__)), path)
    with self.assertRaises(Exception) as ctx:
        create_from(path)
    # str(e) is the portable spelling of the single-argument message
    # that e.message carried on Python 2.
    self.assertEqual(str(ctx.exception), message)
def test_valid_1(self):
    """Smoke test: a known-good config file must parse without raising."""
    valid_config = './tests/test/valid_1.yml'
    create_from(valid_config)
def build_and_run_docker_compose(args, job):
    """Rewrite the job's docker-compose file for a local run and execute it.

    Writes a derived compose file (<original>.infrabox) with volumes
    rewritten to the local project root and the InfraBox directories
    mounted, then drives docker-compose rm/build/up/ps and removes the
    derived file when done. SIGINT during 'up' stops the containers and
    cleans up before exiting.
    """
    create_infrabox_directories(args, job)

    compose_file = os.path.join(job['infrabox_context'], job['docker_compose_file'])
    compose_file = os.path.normpath(compose_file)
    # The rewritten copy lives next to the original with an extra suffix.
    compose_file_new = compose_file + ".infrabox"

    # rewrite compose file
    compose_file_content = docker_compose.create_from(compose_file)
    for service in compose_file_content['services']:
        create_infrabox_directories(args, job, service=service,
                                    services=compose_file_content['services'],
                                    compose_file=compose_file)

        volumes = []
        for v in compose_file_content['services'][service].get('volumes', []):
            # Only short-syntax (string) volumes are rewritten; dict-form
            # (long-syntax) entries are passed through unchanged.
            if isinstance(v, basestring):
                v = v.replace('/infrabox/context', args.project_root)
            volumes.append(v)

        # Mount each InfraBox directory (output, cache, ...) into the service.
        for name, path in job['directories'].items():
            volumes.append(str('%s:/infrabox/%s' % (path, name)))

        # Mount /infrabox/context to the build context of the service if build.context
        # is set in the compose file for the service
        service_build = compose_file_content['services'][service].get('build', None)
        if service_build:
            service_build_context = service_build.get('context', None)
            if service_build_context:
                build_context = os.path.join(os.path.dirname(compose_file), service_build_context)
                volumes += ['%s:/infrabox/context' % str(build_context)]
            else:
                volumes += ['%s:/infrabox/context' % args.project_root]
        else:
            volumes += ['%s:/infrabox/context' % args.project_root]

        # dict.fromkeys de-duplicates while preserving insertion order.
        volumes = list(dict.fromkeys(volumes))
        compose_file_content['services'][service]['volumes'] = volumes

        build = compose_file_content['services'][service].get('build', None)
        if build:
            # NOTE(review): when 'args' is missing/empty this sets it to []
            # but the elif then skips adding INFRABOX_BUILD_NUMBER=local —
            # the sibling variant below appends unconditionally. Looks like
            # a bug; confirm intended behavior.
            if not build.get('args', None):
                build['args'] = []
            elif not any([build_arg.startswith("INFRABOX_BUILD_NUMBER=")
                          for build_arg in build['args']]):
                build['args'] += ['INFRABOX_BUILD_NUMBER=local']

    with open(compose_file_new, "w+") as out:
        yaml.dump(compose_file_content, out, default_flow_style=False)

    # Minimal environment for docker-compose, marking this as a CLI run.
    env = {
        'PATH': os.environ['PATH'],
        'INFRABOX_CLI': 'true',
        'INFRABOX_BUILD_NUMBER': 'local'
    }

    if 'environment' in job:
        for name, value in job['environment'].items():
            # dict-valued entries reference secrets; resolve them locally.
            if isinstance(value, dict):
                env[name] = get_secret(args, value['$secret'])
            else:
                env[name] = value

    if not args.no_rm:
        # Remove leftover containers from a previous run.
        execute([
            'docker-compose', '-p', args.project_name,
            '-f', compose_file_new, 'rm', '-f'
        ], env=env, cwd=job['build_context'])

    execute([
        'docker-compose', '-p', args.project_name,
        '-f', compose_file_new, 'build'
    ], env=env, cwd=job['build_context'])

    def signal_handler(_, __):
        # Ctrl-C: stop containers, drop the derived compose file, exit.
        logger.info("Stopping docker containers")
        execute(['docker-compose', '-f', compose_file_new, 'stop'],
                env=env, cwd=job['build_context'])
        os.remove(compose_file_new)
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)

    execute([
        'docker-compose', '-p', args.project_name,
        '-f', compose_file_new, 'up', '--abort-on-container-exit'
    ], env=env)

    # Restore default SIGINT handling once 'up' has finished.
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    # Print the return code of all the containers
    execute([
        'docker-compose', '-p', args.project_name,
        '-f', compose_file_new, 'ps'
    ], env=env, cwd=job['build_context'])

    os.remove(compose_file_new)
def build_and_run_docker_compose(args, job):
    """Rewrite the job's docker-compose file for a local run and execute it.

    Writes a derived compose file (<original>.infrabox) with volumes
    rewritten to the local project root and the InfraBox directories
    mounted, then drives docker-compose rm/build/up and removes the
    derived file when done. SIGINT during 'up' stops the containers and
    cleans up before exiting.
    """
    create_infrabox_directories(args, job)

    compose_file = os.path.join(job['infrabox_context'], job['docker_compose_file'])
    compose_file = os.path.normpath(compose_file)
    # The rewritten copy lives next to the original with an extra suffix.
    compose_file_new = compose_file + ".infrabox"

    # rewrite compose file
    compose_file_content = docker_compose.create_from(compose_file)
    for service in compose_file_content['services']:
        create_infrabox_directories(args, job, service=service,
                                    services=compose_file_content['services'],
                                    compose_file=compose_file)

        volumes = []
        for v in compose_file_content['services'][service].get('volumes', []):
            # Compose volumes may use dict (long) syntax as well as the
            # short "src:dst" string form; only strings can be rewritten.
            # Without this guard (present in the sibling variant of this
            # function) a dict entry crashed with AttributeError.
            if isinstance(v, basestring):
                v = v.replace('/infrabox/context', args.project_root)
            volumes.append(v)

        # Mount each InfraBox directory (output, cache, ...) into the service.
        for name, path in job['directories'].items():
            volumes.append(str('%s:/infrabox/%s' % (path, name)))

        compose_file_content['services'][service]['volumes'] = volumes

    with open(compose_file_new, "w+") as out:
        yaml.dump(compose_file_content, out, default_flow_style=False)

    # Minimal environment for docker-compose, marking this as a CLI run.
    env = {'PATH': os.environ['PATH'], 'INFRABOX_CLI': 'true'}

    if 'environment' in job:
        for name, value in job['environment'].items():
            # dict-valued entries reference secrets; resolve them locally.
            if isinstance(value, dict):
                env[name] = get_secret(args, value['$secret'])
            else:
                env[name] = value

    if not args.no_rm:
        # Remove leftover containers from a previous run.
        execute([
            'docker-compose', '-p', args.project_name,
            '-f', compose_file_new, 'rm', '-f'
        ], env=env, cwd=job['build_context'])

    execute([
        'docker-compose', '-p', args.project_name,
        '-f', compose_file_new, 'build'
    ], env=env, cwd=job['build_context'])

    def signal_handler(_, __):
        # Ctrl-C: stop containers, drop the derived compose file, exit.
        logger.info("Stopping docker containers")
        execute(['docker-compose', '-f', compose_file_new, 'stop'],
                env=env, cwd=job['build_context'])
        os.remove(compose_file_new)
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)

    execute([
        'docker-compose', '-p', args.project_name,
        '-f', compose_file_new, 'up', '--abort-on-container-exit'
    ], env=env)

    # Restore default SIGINT handling once 'up' has finished.
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    os.remove(compose_file_new)
def run_job_docker_compose(self, c):
    """Build and run a docker-compose job inside the InfraBox job runner.

    Rewrites the compose file (as JSON, <original>.infrabox.json) so every
    service gets per-service InfraBox upload/cache/output mounts and a
    registry-based image name with build caching, then runs
    docker-compose build/up, collects stats, and caches the built images.

    Returns:
        True on success.

    Raises:
        Failure: if build or run of the containers fails.
    """
    c.header("Build containers", show=True)
    f = self.job['dockerfile']

    compose_file = os.path.normpath(os.path.join(self.job['definition']['infrabox_context'], f))
    # The rewritten copy is dumped as JSON next to the original.
    compose_file_new = compose_file + ".infrabox.json"

    # rewrite compose file
    compose_file_content = create_from(compose_file)
    for service in compose_file_content['services']:
        # Per-service directories for cache and uploaded artifacts.
        service_cache_dir = os.path.join(self.infrabox_cache_dir, service)

        if not os.path.exists(service_cache_dir):
            makedirs(service_cache_dir)

        service_output_dir = os.path.join(self.infrabox_output_dir, service)
        makedirs(service_output_dir)

        service_testresult_dir = os.path.join(self.infrabox_testresult_dir, service)
        makedirs(service_testresult_dir)

        service_coverage_dir = os.path.join(self.infrabox_coverage_dir, service)
        makedirs(service_coverage_dir)

        service_markdown_dir = os.path.join(self.infrabox_markdown_dir, service)
        makedirs(service_markdown_dir)

        service_markup_dir = os.path.join(self.infrabox_markup_dir, service)
        makedirs(service_markup_dir)

        service_badge_dir = os.path.join(self.infrabox_badge_dir, service)
        makedirs(service_badge_dir)

        service_archive_dir = os.path.join(self.infrabox_archive_dir, service)
        makedirs(service_archive_dir)

        # Standard InfraBox mounts every service receives.
        service_volumes = [
            "%s:/infrabox/cache" % service_cache_dir,
            "%s:/infrabox/inputs" % self.infrabox_inputs_dir,
            "%s:/infrabox/output" % service_output_dir,
            "%s:/infrabox/upload/testresult" % service_testresult_dir,
            "%s:/infrabox/upload/markdown" % service_markdown_dir,
            "%s:/infrabox/upload/markup" % service_markup_dir,
            "%s:/infrabox/upload/badge" % service_badge_dir,
            "%s:/infrabox/upload/coverage" % service_coverage_dir,
            "%s:/infrabox/upload/archive" % service_archive_dir,
        ]

        # Rewrite user-declared volumes so /infrabox/context points at the
        # mounted repository.
        # NOTE(review): assumes string (short-syntax) volumes only; a
        # dict-form volume would raise AttributeError here — confirm.
        for v in compose_file_content['services'][service].get('volumes', []):
            v = v.replace('/infrabox/context', self.mount_repo_dir)
            service_volumes.append(v)

        # Mount /infrabox/context to the build context of the service if build.context
        # is set in the compose file for the service
        service_build = compose_file_content['services'][service].get('build', None)
        if service_build:
            service_build_context = service_build.get('context', None)
            if service_build_context:
                build_context = os.path.join(os.path.dirname(compose_file), service_build_context)
                service_volumes += ['%s:/infrabox/context' % build_context]
            else:
                service_volumes += ['%s:/infrabox/context' % self.mount_repo_dir]
        else:
            service_volumes += ['%s:/infrabox/context' % self.mount_repo_dir]

        if os.environ['INFRABOX_LOCAL_CACHE_ENABLED'] == 'true':
            service_volumes.append("/local-cache:/infrabox/local-cache")

        compose_file_content['services'][service]['volumes'] = service_volumes

        # Image name: <registry>/<project-id>/<job-name>/<service>.
        image_name = get_registry_name() + '/' \
                     + self.project['id'] + '/' \
                     + self.job['name'] + '/' \
                     + service
        image_name_latest = image_name + ':latest'

        build = compose_file_content['services'][service].get('build', None)
        if build:
            # Tag the built image and seed docker's build cache with the
            # previously cached image, if any.
            compose_file_content['services'][service]['image'] = image_name_latest
            build['cache_from'] = [image_name_latest]
            self.get_cached_image(image_name_latest)

            if not build.get('args', None):
                build['args'] = []

            build['args'] += ['INFRABOX_BUILD_NUMBER=%s' % self.build['build_number']]

    with open(compose_file_new, "w+") as out:
        json.dump(compose_file_content, out)

    collector = StatsCollector()

    try:
        # Best-effort cleanup of leftover containers; failure is only logged.
        try:
            c.execute(['docker-compose', '-f', compose_file_new, 'rm'], env=self.environment)
        except Exception as e:
            logger.exception(e)

        self.environment['PATH'] = os.environ['PATH']
        c.execute(['docker-compose', '-f', compose_file_new, 'build'], show=True, env=self.environment)
        c.header("Run docker-compose", show=True)

        cwd = self._get_build_context_current_job()
        c.execute(['docker-compose', '-f', compose_file_new, 'up', '--abort-on-container-exit'],
                  env=self.environment, show=True, cwd=cwd)
        c.execute(['docker-compose', '-f', compose_file_new, 'ps'],
                  env=self.environment, cwd=cwd, show=True)
        # External helper turns the per-container exit codes into a job result.
        c.execute(['get_compose_exit_code.sh', compose_file_new],
                  env=self.environment, cwd=cwd, show=True)
    except:
        raise Failure("Failed to build and run container")
    finally:
        # Always report stats and tear the containers down, even on failure.
        c.header("Finalize", show=True)
        try:
            collector.stop()
            self.post_stats(collector.get_result())
            c.execute(['docker-compose', '-f', compose_file_new, 'rm'], env=self.environment)
        except Exception as e:
            logger.exception(e)

    # Push the successfully built service images into the registry cache.
    for service in compose_file_content['services']:
        image_name = get_registry_name() + '/' \
                     + self.project['id'] + '/' \
                     + self.job['name'] + '/' \
                     + service
        image_name_latest = image_name + ':latest'
        build = compose_file_content['services'][service].get('build', None)
        if build:
            compose_file_content['services'][service]['image'] = service
            self.cache_docker_image(image_name_latest, image_name_latest)

    return True
def run_docker_compose(self, c):
    """Build and run a docker-compose job against the /repo checkout.

    Rewrites the compose file (<original>.infrabox) so every service gets
    the repository context and per-service InfraBox upload/cache/output
    mounts, then runs docker-compose build/up while collecting stats.

    Returns:
        True on success.

    Raises:
        Failure: if build or run of the containers fails.
    """
    c.header("Build containers", show=True)
    f = self.job['dockerfile']

    # An optional base_path relocates the compose file inside the repo.
    if self.job.get('base_path', None):
        f = os.path.join(self.job['base_path'], f)

    compose_file = os.path.join('/repo', f)
    # The rewritten copy lives next to the original with an extra suffix.
    compose_file_new = compose_file + ".infrabox"

    # rewrite compose file
    compose_file_content = create_from(compose_file)
    for service in compose_file_content['services']:
        # Per-service directories for cache and uploaded artifacts.
        service_cache_dir = os.path.join(self.infrabox_cache_dir, service)
        if not os.path.exists(service_cache_dir):
            makedirs(service_cache_dir)

        service_output_dir = os.path.join(self.infrabox_output_dir, service)
        makedirs(service_output_dir)

        service_testresult_dir = os.path.join(self.infrabox_testresult_dir, service)
        makedirs(service_testresult_dir)

        service_markdown_dir = os.path.join(self.infrabox_markdown_dir, service)
        makedirs(service_markdown_dir)

        service_markup_dir = os.path.join(self.infrabox_markup_dir, service)
        makedirs(service_markup_dir)

        service_badge_dir = os.path.join(self.infrabox_badge_dir, service)
        makedirs(service_badge_dir)

        # Replace the service's volumes entirely with the InfraBox mounts;
        # user-declared volumes are not preserved in this variant.
        service_volumes = [
            "/repo:/infrabox/context",
            "%s:/infrabox/cache" % service_cache_dir,
            "%s:/infrabox/inputs" % self.infrabox_inputs_dir,
            "%s:/infrabox/output" % service_output_dir,
            "%s:/infrabox/upload/testresult" % service_testresult_dir,
            "%s:/infrabox/upload/markdown" % service_markdown_dir,
            "%s:/infrabox/upload/markup" % service_markup_dir,
            "%s:/infrabox/upload/badge" % service_badge_dir,
        ]

        if os.environ['INFRABOX_LOCAL_CACHE_ENABLED'] == 'true':
            service_volumes.append("/local-cache:/infrabox/local-cache")

        compose_file_content['services'][service]['volumes'] = service_volumes

    with open(compose_file_new, "w+") as out:
        yaml.dump(compose_file_content, out, default_flow_style=False)

    collector = StatsCollector()

    try:
        self.environment['PATH'] = os.environ['PATH']
        c.execute(['docker-compose', '-f', compose_file_new, 'build'],
                  show=True, env=self.environment)
        c.header("Run docker-compose", show=True)

        # Run from the job's base_path inside the repo, if one is set.
        cwd = self.job.get('base_path', None)
        if cwd:
            cwd = os.path.join('/repo', cwd)

        c.execute(['docker-compose', '-f', compose_file_new, 'up', '--abort-on-container-exit'],
                  env=self.environment, show=True, cwd=cwd)
        c.execute(['docker-compose', '-f', compose_file_new, 'ps'],
                  env=self.environment, cwd=cwd)
        # External helper turns the per-container exit codes into a job result.
        c.execute(['get_compose_exit_code.sh', compose_file_new],
                  env=self.environment, cwd=cwd)
    except:
        raise Failure("Failed to build and run container")
    finally:
        # Always stop the collector and report stats, even on failure.
        collector.stop()
        self.post_stats(collector.get_result())

    return True
def test_volume_parsing(self):
    """The volume-parsing fixture must be accepted by create_from()."""
    fixture = './tests/test/volume_parsing.yaml'
    create_from(fixture)
def build_and_run_docker_compose(args, job):
    """Rewrite the job's docker-compose file for a local run and execute it.

    Writes a derived compose file (<original>.infrabox) that mounts the
    per-service InfraBox directory as the only volume, then drives
    docker-compose rm/build/up and removes the derived file when done.
    SIGINT during 'up' stops the containers and cleans up before exiting.
    """
    compose_file = os.path.join(job['base_path'], job['docker_compose_file'])
    # The rewritten copy lives next to the original with an extra suffix.
    compose_file_new = compose_file + ".infrabox"

    # rewrite compose file
    compose_file_content = docker_compose.create_from(compose_file)
    for service in compose_file_content['services']:
        infrabox = create_infrabox_directories(args, job, service=service)
        compose_file_content['services'][service]['volumes'] = [
            "%s:/infrabox" % str(infrabox)
        ]

    with open(compose_file_new, "w+") as out:
        yaml.dump(compose_file_content, out, default_flow_style=False)

    env = {"PATH": os.environ['PATH']}
    for e in args.environment:
        # Split on the first '=' only, so values like VAR=a=b survive
        # intact (plain split dropped everything after the second '=').
        s = e.split("=", 1)
        env[s[0]] = s[1]

    if 'environment' in job:
        # .items() instead of the Python-2-only .iteritems(); behavior is
        # identical here.
        for name, value in job['environment'].items():
            # dict-valued entries reference secrets, which are not
            # resolvable in a local run; skip them.
            if isinstance(value, dict):
                continue
            env[name] = value

    if args.clean:
        # Remove leftover containers from a previous run.
        execute([
            'docker-compose', '-p', args.project_name,
            '-f', compose_file_new, 'rm', '-f'
        ], env=env, cwd=job['base_path'])

    execute([
        'docker-compose', '-p', args.project_name,
        '-f', compose_file_new, 'build'
    ], env=env, cwd=job['base_path'])

    def signal_handler(_, __):
        # Ctrl-C: stop containers, drop the derived compose file, exit.
        logger.info("Stopping docker containers")
        execute(['docker-compose', '-f', compose_file_new, 'stop'],
                env=env, cwd=job['base_path'])
        os.remove(compose_file_new)
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)

    execute([
        'docker-compose', '-p', args.project_name,
        '-f', compose_file_new, 'up', '--abort-on-container-exit'
    ], env=env)

    # Restore default SIGINT handling once 'up' has finished.
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    os.remove(compose_file_new)
def build_and_run_docker_compose(args, job):
    """Rewrite the job's docker-compose file for a local run and execute it.

    Writes a derived compose file (<original>.infrabox) that mounts the
    per-service InfraBox directory as the only volume, validates the
    usernames of the built image names, then drives docker-compose
    rm/build/up and removes the derived file when done. SIGINT during
    'up' stops the containers and cleans up before exiting.
    """
    compose_file = os.path.join(job['base_path'], job['docker_compose_file'])
    # The rewritten copy lives next to the original with an extra suffix.
    compose_file_new = compose_file + ".infrabox"

    # rewrite compose file
    compose_file_content = docker_compose.create_from(compose_file)
    for service in compose_file_content['services']:
        infrabox = create_infrabox_directories(args, job, service=service)
        compose_file_content['services'][service]['volumes'] = [
            "%s:/infrabox" % str(infrabox)
        ]

    with open(compose_file_new, "w+") as out:
        yaml.dump(compose_file_content, out, default_flow_style=False)

    env = {"PATH": os.environ['PATH']}
    # NOTE(review): plain split drops everything after a second '=' in
    # values like VAR=a=b; split("=", 1) would be safer — confirm intent.
    for e in args.environment:
        s = e.split("=")
        env[s[0]] = s[1]

    if args.clean:
        # Remove leftover containers from a previous run.
        execute([
            'docker-compose', '-p', args.project_name,
            '-f', compose_file_new, 'rm', '-f'
        ], env=env, cwd=job['base_path'])

    execute([
        'docker-compose', '-p', args.project_name,
        '-f', compose_file_new, 'build'
    ], env=env, cwd=job['base_path'])

    # docker-compose strips '-' from the project name when prefixing images.
    pname = args.project_name.replace("-", "")

    # NOTE(review): shell=True with args.project_name interpolated into the
    # pipeline is shell-injection-prone, and check_output raises
    # CalledProcessError when grep matches nothing (exit 1) — confirm both
    # are acceptable here.
    image_names = subprocess.check_output(
        "docker images | grep %s | awk '{print $1;}'" % pname, shell=True)
    for l in image_names.splitlines():
        check_username(l)

    def signal_handler(_, __):
        # Ctrl-C: stop containers, drop the derived compose file, exit.
        logger.info("Stopping docker containers")
        execute(['docker-compose', '-f', compose_file_new, 'stop'],
                env=env, cwd=job['base_path'])
        os.remove(compose_file_new)
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)

    execute([
        'docker-compose', '-p', args.project_name,
        '-f', compose_file_new, 'up', '--abort-on-container-exit'
    ], env=env)

    # Restore default SIGINT handling once 'up' has finished.
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    os.remove(compose_file_new)