def test_valid():
    """A three-job pipeline with machine configs and a dependency chain validates."""
    compile_job = {
        "type": "docker",
        "name": "compile",
        "docker_file": "test/Dockerfile_benchmarks",
        "build_only": False,
        "machine_config": "vm_ubuntu_6c_8gb",
    }
    benchmark_job = {
        "type": "docker",
        "name": "benchmark_server",
        "docker_file": "test/Dockerfile_benchmarks",
        "machine_config": "vm_ubuntu_6c_8gb",
        "build_only": False,
        "depends_on": ["compile"],
    }
    server_job = {
        "type": "docker",
        "name": "test_server",
        "docker_file": "test/Dockerfile_test_server",
        "machine_config": "vm_ubuntu_6c_8gb",
        "build_only": False,
        "depends_on": ["compile"],
    }
    validate_json({"version": 1, "jobs": [compile_job, benchmark_job, server_job]})
def test_security_context():
    """Walk security_context through every invalid shape, then a valid one."""
    d = {
        "version": 1,
        "jobs": [{
            "type": "docker",
            "name": "test",
            "docker_file": "Dockerfile",
            "resources": {"limits": {"cpu": 1, "memory": 1024}},
            "security_context": [],
        }],
    }
    job = d['jobs'][0]
    # security_context starts as a list, which must be rejected.
    raises_expect(d, "#jobs[0].security_context: must be an object")
    invalid = [
        ({'capabilities': []},
         "#jobs[0].security_context.capabilities: must be an object"),
        ({'capabilities': {'add': {}}},
         "#jobs[0].security_context.capabilities.add: must be an array"),
        ({'capabilities': {'add': [123]}},
         "#jobs[0].security_context.capabilities.add[0]: is not a string"),
    ]
    for value, expected in invalid:
        job['security_context'] = value
        raises_expect(d, expected)
    # A string capability name is the valid form.
    job['security_context'] = {'capabilities': {'add': ['CAP']}}
    validate_json(d)
def test_valid(self):
    """A three-job pipeline with resource limits and dependencies validates."""
    compile_job = {
        "type": "docker",
        "name": "compile",
        "docker_file": "test/Dockerfile_benchmarks",
        "build_only": False,
        "resources": {"limits": {"cpu": 1, "memory": 1024}},
    }
    benchmark_job = {
        "type": "docker",
        "name": "benchmark_server",
        "docker_file": "test/Dockerfile_benchmarks",
        "resources": {"limits": {"cpu": 1, "memory": 1024}},
        "build_only": False,
        "depends_on": ["compile"],
    }
    server_job = {
        "type": "docker",
        "name": "test_server",
        "docker_file": "test/Dockerfile_test_server",
        "resources": {"limits": {"cpu": 1, "memory": 1024}},
        "build_only": False,
        "depends_on": ["compile"],
    }
    validate_json({"version": 1, "jobs": [compile_job, benchmark_job, server_job]})
def test_deployments(self):
    """deployments must be a non-empty array of supported deployment objects."""
    d = {
        "version": 1,
        "jobs": [{
            "type": "docker",
            "name": "test",
            "docker_file": "Dockerfile",
            "resources": {"limits": {"cpu": 1, "memory": 1024}},
            "deployments": None,
        }],
    }
    job = d['jobs'][0]
    registry = {'type': 'docker-registry',
                'host': 'hostname',
                'repository': 'repo',
                'username': '******',
                'password': '******'}
    invalid = [
        (None, "#jobs[0].deployments: must be an array"),
        ([], "#jobs[0].deployments: must not be empty"),
        ([{}], "#jobs[0].deployments[0]: does not contain a 'type'"),
        ([{'type': 'unknown'}],
         "#jobs[0].deployments[0]: type 'unknown' not supported"),
        ([registry],
         "#jobs[0].deployments[0].password: must be an object"),
    ]
    for value, expected in invalid:
        job['deployments'] = value
        self.raises_expect(d, expected)
    # A secret reference for the password makes the deployment valid.
    registry['password'] = {'$secret': 'blub'}
    job['deployments'] = [registry]
    validate_json(d)
def test_environment():
    """environment must map names to strings or {'$secret': <str>} objects."""
    d = {
        "version": 1,
        "jobs": [{
            "type": "docker",
            "name": "test",
            "docker_file": "Dockerfile",
            "resources": {"limits": {"cpu": 1, "memory": 1024}},
            "environment": None,
        }],
    }
    job = d['jobs'][0]
    invalid = [
        (None, "#jobs[0].environment: must be an object"),
        ([], "#jobs[0].environment: must be an object"),
        ({'key': 123}, "#jobs[0].environment.key: must be a string or object"),
        ({'key': {}}, "#jobs[0].environment.key: must contain a $secret"),
        ({'key': {'$secret': None}},
         "#jobs[0].environment.key.$secret: is not a string"),
    ]
    for value, expected in invalid:
        job['environment'] = value
        raises_expect(d, expected)
    # An empty environment mapping is valid.
    job['environment'] = {}
    validate_json(d)
def test_environment():
    """environment must map names to strings or {'$ref': <str>} objects."""
    d = {
        "version": 1,
        "jobs": [{
            "type": "docker",
            "name": "test",
            "docker_file": "Dockerfile",
            "machine_config": "vm",
            "environment": None,
        }],
    }
    job = d['jobs'][0]
    invalid = [
        (None, "#jobs[0].environment: must be an object"),
        ([], "#jobs[0].environment: must be an object"),
        ({'key': 123}, "#jobs[0].environment.key: must be a string or object"),
        ({'key': {}}, "#jobs[0].environment.key: must contain a $ref"),
        ({'key': {'$ref': None}},
         "#jobs[0].environment.key.$ref: is not a string"),
    ]
    for value, expected in invalid:
        job['environment'] = value
        raises_expect(d, expected)
    # An empty environment mapping is valid.
    job['environment'] = {}
    validate_json(d)
def test_build_arguments(self):
    """build_arguments must be an object whose values are all strings."""
    d = {
        "version": 1,
        "jobs": [{
            "type": "docker",
            "name": "test",
            "docker_file": "Dockerfile",
            "resources": {"limits": {"cpu": 1, "memory": 1024}},
            "build_arguments": None,
        }],
    }
    job = d['jobs'][0]
    invalid = [
        (None, "#jobs[0].build_arguments: must be an object"),
        ([], "#jobs[0].build_arguments: must be an object"),
        ({'key': 123}, "#jobs[0].build_arguments.key: is not a string"),
        ({'key': {}}, "#jobs[0].build_arguments.key: is not a string"),
    ]
    for value, expected in invalid:
        job['build_arguments'] = value
        self.raises_expect(d, expected)
    # An empty mapping is valid.
    job['build_arguments'] = {}
    validate_json(d)
def parse_infrabox_json(self, path):
    """Parse and schema-validate the JSON file at *path*.

    Returns the parsed data; raises Failure carrying the underlying
    error message if parsing or validation fails.
    """
    with open(path, 'r') as f:
        try:
            data = json.load(f)
            validate_json(data)
        except Exception as e:
            # str(e) is the idiomatic spelling of e.__str__()
            raise Failure(str(e))
        return data
def validate_infrabox_json(args):
    """Validate <project_root>/infrabox.json; exit(1) if the file is missing.

    Side effect: rewrites args.project_root to its absolute path.
    """
    args.project_root = os.path.abspath(args.project_root)
    infrabox_json_path = os.path.join(args.project_root, 'infrabox.json')

    if not os.path.isfile(infrabox_json_path):
        # Lazy %-args: logging formats only when the record is emitted
        logger.error('%s does not exist', infrabox_json_path)
        sys.exit(1)

    with open(infrabox_json_path, 'r') as f:
        data = json.load(f)

    validate_json(data)
def parse_infrabox_json(self, path):
    """Parse the JSON file at *path*, echo it to the console, validate it.

    Returns the parsed data; raises Failure carrying the underlying
    error message if parsing or validation fails.
    """
    with open(path, 'r') as f:
        try:
            data = json.load(f)
            self.console.collect(json.dumps(data, indent=4), show=True)
            validate_json(data)
        except Exception as e:
            # str(e) is the idiomatic spelling of e.__str__()
            raise Failure(str(e))
        return data
def load_infrabox_json(path):
    """Load, validate and return the infrabox JSON file at *path*.

    Exits the process when *path* was already loaded (an include cycle).
    """
    if path in LOADED_FILES:
        # Grammar fix in the message ("included" -> "include"); lazy %-args
        logger.error('Recursive include detected with %s', path)
        sys.exit(1)

    # LOADED_FILES doubles as a set of already-seen paths
    LOADED_FILES[path] = path

    with open(path) as f:
        data = json.load(f)
        validate_json(data)
        return data
def validate_infrabox_file(args):
    """Validate the infrabox file named by args.infrabox_file_path.

    Exits with status 1 when the file is missing. The file is parsed as
    JSON first and re-read as YAML if that fails.
    Side effect: rewrites args.project_root to its absolute path.
    """
    args.project_root = os.path.abspath(args.project_root)
    infrabox_file_path = args.infrabox_file_path

    if not os.path.isfile(infrabox_file_path):
        # Lazy %-args: logging formats only when the record is emitted
        logger.error('%s does not exist', infrabox_file_path)
        sys.exit(1)

    with open(infrabox_file_path, 'r') as f:
        try:
            data = json.load(f)
        except ValueError:
            f.seek(0)
            # NOTE(review): yaml.load without an explicit Loader can run
            # arbitrary constructors on untrusted input (and requires a
            # Loader in PyYAML >= 6). Consider yaml.safe_load here.
            data = yaml.load(f)

    validate_json(data)
def load_infrabox_file(path):
    """Load, validate and return the infrabox file at *path* (JSON or YAML).

    Exits the process when *path* was already loaded (an include cycle).
    """
    if path in LOADED_FILES:
        # Grammar fix in the message ("included" -> "include"); lazy %-args
        logger.error('Recursive include detected with %s', path)
        sys.exit(1)

    # LOADED_FILES doubles as a set of already-seen paths
    LOADED_FILES[path] = path

    with open(path) as f:
        try:
            data = json.load(f)
        except ValueError:
            f.seek(0)
            # NOTE(review): yaml.load without a Loader is unsafe on
            # untrusted input; consider yaml.safe_load.
            data = yaml.load(f)

    validate_json(data)
    return data
def parse_infrabox_file(self, path):
    """Parse *path* as JSON (falling back to YAML), echo it to the console,
    validate it and return the parsed data.

    Raises Failure carrying the validation error message on failure.
    """
    with open(path, 'r') as f:
        data = None
        try:
            data = json.load(f)
            self.console.collect(json.dumps(data, indent=4), show=True)
        except ValueError:
            f.seek(0)
            # NOTE(review): yaml.load without a Loader is unsafe on
            # untrusted input; consider yaml.safe_load.
            data = yaml.load(f)
            self.console.collect(yaml.dump(data, default_flow_style=False), show=True)

        try:
            validate_json(data)
        except Exception as e:
            # str(e) is the idiomatic spelling of e.__str__()
            raise Failure(str(e))

        return data
def validate_infrabox_file(args):
    """Validate the infrabox file named by args.infrabox_file_path.

    Exits with status 1 when the file is missing; parses JSON first and
    falls back to YAML, choosing the loader call supported by the
    installed PyYAML version.
    Side effect: rewrites args.project_root to its absolute path.
    """
    args.project_root = os.path.abspath(args.project_root)
    infrabox_file_path = args.infrabox_file_path

    if not os.path.isfile(infrabox_file_path):
        # Lazy %-args: logging formats only when the record is emitted
        logger.error('%s does not exist', infrabox_file_path)
        sys.exit(1)

    with open(infrabox_file_path, 'r') as f:
        try:
            data = json.load(f)
        except ValueError:
            f.seek(0)
            # NOTE(review): this compares yaml.__version__ lexicographically
            # ("10.0" < "5.1"); fine for current PyYAML releases but worth
            # replacing with a parsed-version comparison.
            if (sys.version_info.major == 2) or (yaml.__version__ < "5.1"):
                data = yaml.load(f)
            else:
                data = yaml.load(f, Loader=yaml.FullLoader)

    validate_json(data)
def load_infrabox_file(path):
    """Load, validate and return the infrabox file at *path* (JSON or YAML).

    Exits the process when *path* was already loaded (an include cycle).
    Chooses the YAML loader call supported by the installed PyYAML version.
    """
    if path in LOADED_FILES:
        # Grammar fix in the message ("included" -> "include"); lazy %-args
        logger.error('Recursive include detected with %s', path)
        sys.exit(1)

    # LOADED_FILES doubles as a set of already-seen paths
    LOADED_FILES[path] = path

    with open(path) as f:
        try:
            data = json.load(f)
        except ValueError:
            f.seek(0)
            # NOTE(review): lexicographic version-string comparison; fine
            # for current PyYAML releases but fragile in general.
            if (sys.version_info.major == 2) or (yaml.__version__ < "5.1"):
                data = yaml.load(f)
            else:
                data = yaml.load(f, Loader=yaml.FullLoader)

    validate_json(data)
    return data
def test_dep_defined_later(self):
    """'source' may depend on 'compile' even though it appears later."""
    source_job = {
        "type": "docker",
        "name": "source",
        "docker_file": "Dockerfile",
        "resources": {"limits": {"cpu": 1, "memory": 1024}},
        "build_only": False,
        "depends_on": ["compile"],
    }
    compile_job = {
        "type": "docker",
        "name": "compile",
        "docker_file": "Dockerfile",
        "build_only": False,
        "resources": {"limits": {"cpu": 1, "memory": 1024}},
    }
    validate_json({"version": 1, "jobs": [source_job, compile_job]})
def test_kubernetes_limits():
    """resources may carry an extra kubernetes-specific limits section."""
    resources = {
        "limits": {"cpu": 1, "memory": 1024},
        "kubernetes": {"limits": {"cpu": 1, "memory": 1024}},
    }
    validate_json({
        "version": 1,
        "jobs": [{
            "type": "docker",
            "name": "test",
            "docker_file": "Dockerfile",
            "resources": resources,
        }],
    })
def test_repository(self):
    """Per-job repository checkout options are accepted by the schema."""
    job = {
        "type": "docker",
        "name": "compile",
        "docker_file": "test/Dockerfile_benchmarks",
        "repository": {
            "full_history": True,
            "clone": False,
            "submodules": True,
        },
        "resources": {"limits": {"cpu": 1, "memory": 1024}},
        "build_only": False,
    }
    validate_json({"version": 1, "jobs": [job]})
def raises_expect(self, data, expected):
    """Assert validate_json(data) raises ValidationError with message *expected*."""
    # assertRaises instead of a bare `assert False`, which is stripped
    # when Python runs with -O and would make a non-raising call pass.
    with self.assertRaises(ValidationError) as ctx:
        validate_json(data)
    self.assertEqual(ctx.exception.message, expected)
def test_empty_jobs(self):
    """A configuration whose job list is empty is still valid."""
    empty_config = {'version': 1, 'jobs': []}
    validate_json(empty_config)
def load_infrabox_json(path):
    """Read, schema-validate and return the JSON document at *path*."""
    with open(path) as handle:
        parsed = json.load(handle)
    validate_json(parsed)
    return parsed
def raises_expect(data, expected):
    """Validation of *data* must fail with exactly the *expected* message."""
    with assert_raises(ValidationError) as ctx:
        validate_json(data)
    eq_(ctx.exception.message, expected)