def needs_appliance(test_item):
    """
    Use this decorator for tests that need the Toil appliance Docker image.

    The test is marked 'appliance' and is skipped when TOIL_SKIP_DOCKER is
    set, when Docker is not installed, or when the appliance image has not
    been built locally.
    """
    import json
    test_item = _mark_test('appliance', test_item)
    if less_strict_bool(os.getenv('TOIL_SKIP_DOCKER')):
        return unittest.skip('Skipping docker test.')(test_item)
    # Guard clause: no Docker binary on PATH means the test can't run at all.
    if not next(which('docker'), None):
        return unittest.skip('Install Docker to include this test.')(test_item)
    image = applianceSelf()
    try:
        inspect_json = check_output(['docker', 'inspect', image])
    except CalledProcessError:
        matching_ids = []
    else:
        # Collect the IDs of local images whose repo tags mention the
        # appliance image name.
        matching_ids = {entry['Id']
                        for entry in json.loads(inspect_json)
                        if image in entry['RepoTags']}
    if len(matching_ids) == 0:
        return unittest.skip(
            "Cannot find appliance image %s. Use 'make test' target to "
            "automatically build appliance, or just run 'make docker' "
            "prior to running this test." % image)(test_item)
    elif len(matching_ids) == 1:
        return test_item
    else:
        assert False, 'Expected `docker inspect` to return zero or one image.'
def slow(test_item):
    """
    Use this decorator to identify tests that are slow and not critical.
    Skip them if TOIL_TEST_QUICK is true.
    """
    test_item = _mark_test('slow', test_item)
    # Guard clause: skip immediately when quick mode is requested.
    if less_strict_bool(os.getenv('TOIL_TEST_QUICK')):
        return unittest.skip('Skipped because TOIL_TEST_QUICK is "True"')(test_item)
    return test_item
def experimental(test_item):
    """
    Use this to decorate experimental or brittle tests in order to skip them
    during regular builds.

    :param test_item: the test function or class being decorated
    :return: the marked test, or a skipped version of it when
             TOIL_TEST_EXPERIMENTAL is not set to a truthy value
    """
    # We'll pytest.mark_test the test as experimental but we'll also
    # unittest.skip it via an environment variable.
    test_item = _mark_test('experimental', test_item)
    if not less_strict_bool(os.getenv('TOIL_TEST_EXPERIMENTAL')):
        return unittest.skip(
            'Set TOIL_TEST_EXPERIMENTAL="True" to include this experimental test.'
        )(test_item)
    # Bug fix: the original fell off the end here and implicitly returned
    # None when TOIL_TEST_EXPERIMENTAL was set, which replaced the decorated
    # test with None. Return the marked test instead (matching the sibling
    # definition of this decorator).
    return test_item
def experimental(test_item):
    """
    Use this to decorate experimental or brittle tests in order to skip them
    during regular builds.
    """
    # Mark the test for pytest, and additionally skip it under unittest
    # unless the opt-in environment variable is set.
    test_item = _mark_test('experimental', test_item)
    if not less_strict_bool(os.getenv('TOIL_TEST_EXPERIMENTAL')):
        return unittest.skip(
            'Set TOIL_TEST_EXPERIMENTAL="True" to include this experimental test.')(test_item)
    return test_item
def __create_lazy_dirs(self):
    """
    Create the backing directory for every lazy dir and bind-mount it at
    its logical path.

    Each entry in self.lazy_dirs is a (parent, name, persistent) triple;
    when persistent is None, the decision is read from an instance tag
    named after the logical path (presumably set on the cloud instance —
    semantics of instance_tag/less_strict_bool live elsewhere).
    """
    log.info("Bind-mounting directory structure")
    for parent, name, persistent in self.lazy_dirs:
        # Parents must be absolute paths.
        assert parent[0] == os.path.sep
        logical_path = os.path.join(parent, name)
        if persistent is None:
            tag = 'persist' + logical_path.replace(os.path.sep, '_')
            persistent = less_strict_bool(self.instance_tag(tag))
        base_dir = self.persistent_dir if persistent else self.ephemeral_dir
        physical_path = os.path.join(base_dir, parent[1:], name)
        mkdir_p(physical_path)
        os.chown(physical_path, self.uid, self.gid)
        check_call(['mount', '--bind', physical_path, logical_path])
def __create_lazy_dirs(self):
    """Back each lazy directory with real storage and bind-mount it in place."""
    log.info("Bind-mounting directory structure")
    for parent, name, persistent in self.lazy_dirs:
        # Only absolute parent paths are supported.
        assert parent[0] == os.path.sep
        logical_path = os.path.join(parent, name)
        # An unspecified persistence flag is resolved from an instance tag
        # derived from the logical path.
        if persistent is None:
            persistent = less_strict_bool(self.instance_tag(
                'persist' + logical_path.replace(os.path.sep, '_')))
        storage_root = self.persistent_dir if persistent else self.ephemeral_dir
        physical_path = os.path.join(storage_root, parent[1:], name)
        mkdir_p(physical_path)
        os.chown(physical_path, self.uid, self.gid)
        check_call(['mount', '--bind', physical_path, logical_path])
def awsFilterImpairedNodes(nodes, ec2):
    """
    Filter out nodes that are failing EC2 status checks so they are kept
    around for debugging instead of being terminated.

    :param nodes: boto instance objects (each must have an ``id`` attribute)
    :param ec2: boto EC2 connection used to query instance status
    :return: the input nodes when TOIL_AWS_NODE_DEBUG is unset/falsy,
             otherwise only the nodes whose status is not 'impaired'
    """
    # If TOIL_AWS_NODE_DEBUG is set, don't terminate nodes with failing
    # status checks so they can be debugged.
    nodeDebug = less_strict_bool(os.environ.get('TOIL_AWS_NODE_DEBUG'))
    if not nodeDebug:
        return nodes
    nodeIDs = [node.id for node in nodes]
    statuses = ec2.get_all_instance_status(instance_ids=nodeIDs)
    statusMap = {status.id: status.instance_status for status in statuses}
    healthyNodes = [node for node in nodes
                    if statusMap.get(node.id, None) != 'impaired']
    impairedNodes = [node.id for node in nodes
                     if statusMap.get(node.id, None) == 'impaired']
    # Fix: only warn when there actually are impaired nodes; previously an
    # empty list produced a misleading warning every call. Also use
    # logger.warning — logger.warn is deprecated in the logging module.
    if impairedNodes:
        logger.warning(
            'TOIL_AWS_NODE_DEBUG is set and nodes %s have failed EC2 status checks so '
            'will not be terminated.', ' '.join(impairedNodes))
    return healthyNodes
def integrative(test_item):
    """
    Use this to decorate integration tests so as to skip them during regular
    builds. We define integration tests as A) involving other, non-Toil software
    components that we develop and/or B) having a higher cost (time or money).
    Note that brittleness does not qualify a test for being integrative. Neither
    does involvement of external services such as AWS, since that would cover
    most of Toil's test.
    """
    # Mark the test for pytest, and skip it under unittest unless integration
    # tests were explicitly enabled via the environment.
    test_item = _mark_test('integrative', test_item)
    if not less_strict_bool(os.getenv('TOIL_TEST_INTEGRATIVE')):
        return unittest.skip(
            'Set TOIL_TEST_INTEGRATIVE="True" to include this integration test.')(test_item)
    return test_item
def _set_instance_options(self, options):
    """
    Apply instance options, additionally capturing whether the agent
    should be enabled (leniently parsed from the 'enable_agent' option,
    which may be absent).
    """
    super(AgentBox, self)._set_instance_options(options)
    enable_agent_option = options.get('enable_agent')
    self._enable_agent = less_strict_bool(enable_agent_option)
def _set_instance_options(self, options):
    """
    Let the superclass record the options, then remember the agent-enable
    flag for this instance.
    """
    super(AgentBox, self)._set_instance_options(options)
    # options.get returns None when the key is absent; less_strict_bool
    # presumably tolerates that — semantics defined elsewhere.
    self._enable_agent = less_strict_bool(options.get('enable_agent'))