def test_load_json(self, *args):
    cases = [
        [lambda path: path == Loader.BUILTIN_DATA_PATH,
         Loader.BUILTIN_DATA_PATH,
         "Loader should use builtin data path."],
        [lambda path: path == Loader.CUSTOMER_DATA_PATH,
         Loader.CUSTOMER_DATA_PATH,
         "Loader should use customer data path."],
        [lambda path: True,
         Loader.CUSTOMER_DATA_PATH,
         "Loader should prefer file at customer data path over built-in path."]
    ]
    for case in cases:
        loader = Loader()
        with patch('os.path.isdir') as mock_isdir:
            mock_isdir.side_effect = case[0]
            with patch.object(JSONFileLoader, 'load_file') as mock_load_file:
                mock_load_file.side_effect = lambda path: path
                data = loader.load_json('foo.json')
                self.assertEqual(1, mock_load_file.call_count)
                self.assertEqual(os.path.join(case[1], 'foo.json'), data, case[2])
def test_load_service_data(self, *args):
    # local aliases for brevity
    builtin = FakeFileSystem.builtin_full_path
    customer = FakeFileSystem.customer_full_path
    correct = [['service-1', builtin('service-1', 'service-1-2.yaml')],
               ['service-4', builtin('service-4', 'service-4-2.yaml')],
               ['service-2', customer('service-2', 'service-2-1.yaml')],
               ['service-3', customer('service-3', 'service-3-2.yaml')],
               ['service-5', customer('service-5', 'service-5-1.yaml')],
               ['alias-1', builtin('service-1', 'service-1-2.yaml')]]
    with patch.object(YAMLFileLoader, 'load_alias') as mock_load_alias:
        mock_load_alias.side_effect = \
            lambda path: 'alias-1' if 'service-1' in path else None
        loader = Loader()
        for service in correct:
            with patch.object(YAMLFileLoader, 'load_file') as mock_load_file:
                mock_load_file.side_effect = lambda path: path
                self.assertEqual(service[1],
                                 loader.load_service_data(service[0]))
                self.assertEqual(1, mock_load_file.call_count)
        for not_found in ['service-6', 'service-foo']:
            with patch.object(YAMLFileLoader, 'load_file') as mock_load_file:
                with self.assertRaises(ValidationError):
                    loader.load_service_data(not_found)
                self.assertEqual(0, mock_load_file.call_count)
def test_list_available_services(self, *args):
    loader = Loader()
    services = loader.list_available_services()
    self.assertEqual(6, len(services))
    self.assertEqual(len(set(services)), len(services))
    self.assertEqual(
        0,
        len(set(services).symmetric_difference([
            'alias-1', 'service-1', 'service-2',
            'service-3', 'service-4', 'service-5'
        ])))
def __init__(self):
    self._loader = Loader()
    self._endpoint_creator = EndpointCreator(EndpointResolver())
    self._user_agent_header = self._build_user_agent_header()
    self._response_parser_factory = ResponseParserFactory()
    self._cli_data = self._loader.load_json('cli.json')
    self._retryhandler = self._create_default_retryhandler()
    self._available_services = self._loader.list_available_services()
    self._command_table = self._build_command_table()
    self._argument_table = self._build_argument_table()
    self._client_creator = ClientCreator(self._loader,
                                         Context(),
                                         self._endpoint_creator,
                                         self._user_agent_header,
                                         self._response_parser_factory,
                                         self._retryhandler)
def test_load_service_data_repeatable_cached(self, *args):
    with patch.object(YAMLFileLoader, 'load_file'):
        loader = Loader()
    # warm up with both args and kwargs, ensure results are identical
    with patch.object(YAMLFileLoader, 'load_file') as mock_load_file:
        mock_load_file.side_effect = lambda path: path
        data = loader.load_service_data('service-1')
        self.assertEqual(
            data, loader.load_service_data(service_name='service-1'))
        self.assertEqual(2, mock_load_file.call_count)
    # ensure results are repeatable and come from the cache
    # (no further calls to load_file)
    with patch.object(YAMLFileLoader, 'load_file') as mock_load_file:
        mock_load_file.side_effect = lambda path: path
        self.assertEqual(data, loader.load_service_data('service-1'))
        self.assertEqual(
            data, loader.load_service_data(service_name='service-1'))
        self.assertEqual(0, mock_load_file.call_count)
def __init__(self, debug=False, tls_verify=False, strict_errors=False,
             tls_warnings=False, client_endpoint=None, cdp_credentials=None,
             error_handler=None, warning_handler=None, scrub_inputs=True,
             cp_region='default', agent_header=None):
    # Init Params
    self.debug = debug
    self.tls_verify = tls_verify
    self.strict_errors = strict_errors
    self.tls_warnings = tls_warnings
    self.client_endpoint = client_endpoint
    self.cdp_credentials = cdp_credentials
    self.scrub_inputs = scrub_inputs
    self.cp_region = cp_region
    self.agent_header = agent_header if agent_header is not None else 'CDPY'

    # Setup
    self.throw_error = error_handler if error_handler else self._default_throw_error
    self.throw_warning = warning_handler if warning_handler else self._default_throw_warning
    self._clients = {}
    self.DEFAULT_PAGE_SIZE = 100
    _loader = Loader()
    _user_agent = self._make_user_agent_header()
    self._client_creator = ClientCreator(
        _loader, Context(), EndpointCreator(EndpointResolver()), _user_agent,
        ResponseParserFactory(),
        create_retry_handler(self._load_retry_config(_loader)))

    # Logging
    _log_format = '%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s'
    if debug:
        self._setup_logger(logging.DEBUG, _log_format)
        self.logger.debug("CDP SDK version: %s", _user_agent)
    else:
        self._setup_logger(logging.ERROR, _log_format)
    if self.tls_warnings is False:
        urllib3.disable_warnings(InsecureRequestWarning)

    # Warnings
    def _warning_format(message, category, filename, lineno, line=None):
        return ' %s:%s: %s:%s' % (filename, lineno, category.__name__, message)

    warnings.formatwarning = _warning_format

    # State listings
    # https://github.com/hortonworks/cloudbreak/blob/master/cluster-api/src/main/java/com/sequenceiq/
    # cloudbreak/cluster/status/ClusterStatus.java#L8-L18
    # https://github.com/hortonworks/cloudbreak/blob/master/core-api/src/main/java/com/sequenceiq/
    # cloudbreak/api/endpoint/v4/common/Status.java#L14-L53
    self.CREATION_STATES = [
        'REQUESTED',
        'EXTERNAL_DATABASE_CREATION_IN_PROGRESS',
        'STACK_CREATION_IN_PROGRESS',
        'CREATION_INITIATED',
        'FREEIPA_CREATION_IN_PROGRESS',
        'STARTING',
        'ENABLING',  # DF
        'provision:started',  # ML
        'installation:started'  # ML
    ]
    self.TERMINATION_STATES = [
        'EXTERNAL_DATABASE_DELETION_IN_PROGRESS',
        'STACK_DELETION_IN_PROGRESS',
        'FREEIPA_DELETE_IN_PROGRESS',
        'STOPPING',
        'deprovision:started',  # ML
        'DISABLING'  # DF
    ]
    self.STARTED_STATES = [
        'EXTERNAL_DATABASE_START_IN_PROGRESS',
        'AVAILABLE',
        'START_IN_PROGRESS',
        'RUNNING',
        'installation:finished',  # ML
        'Running',  # DW
        'GOOD_HEALTH',  # DF
        'ClusterCreationCompleted'  # DE
    ]
    self.STOPPED_STATES = [
        'EXTERNAL_DATABASE_STOP_IN_PROGRESS',
        'STOP_IN_PROGRESS',
        'STOPPED',
        'ENV_STOPPED',
        'Stopped',  # DW
        'NOT_ENABLED',  # DF
        'ClusterDeletionCompleted', 'AppDeleted'  # DE
    ]
    self.FAILED_STATES = [
        'PROVISIONING_FAILED',
        'CREATE_FAILED',
        'REJECTED',
        'FAILED',
        'TIMEDOUT',
        'DELETE_FAILED',
        'Error',  # DW
        'installation:failed',  # ML
        'provision:failed',  # ML
        'deprovision:failed',  # ML
        'BAD_HEALTH',  # DF
        # DE service (all intermediate failure states, until CDE exposes a
        # higher-level summary state)
        'ClusterChartInstallationFailed',
        'ClusterDNSCreationFailed',
        'ClusterDNSDeletionFailed',
        'ClusterIngressCreationFailed',
        'ClusterProvisioningFailed',
        'DBProvisioningFailed',
        'FSMountTargetsCreationFailed',
        'FSProvisioningFailed',
        'ClusterTLSCertCreationFailed',
        'ClusterServiceMeshProvisioningFailed',
        'ClusterMonitoringConfigurationFailed',
        'ClusterChartDeletionFailed',
        'ClusterDeletionFailed',
        'ClusterNamespaceDeletionFailed',
        'DBDeletionFailed',
        'FSMountTargetsDeletionFailed',
        'FSDeletionFailed',
        'ClusterTLSCertDeletionFailed',
        'ClusterServiceMeshDeletionFailed',
        'ClusterAccessGroupCreationFailed',
        'ClusterAccessGroupDeletionFailed',
        'ClusterUserSyncCheckFailed',
        'ClusterCreationFailed',
        'ClusterDeleteFromDBFailed',
        'ClusterMaintenanceFailed',
        'ClusterTLSCertRenewalFailed',
        # DE virtual cluster
        'AppInstallationFailed',
        'AppDeletionFailed'
    ]
    self.REMOVABLE_STATES = [
        'AVAILABLE', 'UPDATE_FAILED', 'CREATE_FAILED', 'ENABLE_SECURITY_FAILED',
        'DELETE_FAILED', 'DELETE_COMPLETED', 'DELETED_ON_PROVIDER_SIDE',
        'STOPPED', 'START_FAILED', 'STOP_FAILED',
        'installation:failed', 'deprovision:failed', 'installation:finished',
        'modify:finished',  # ML
        'Error', 'Running', 'Stopped', 'Deleting',  # DW
        'GOOD_HEALTH', 'CONCERNING_HEALTH', 'BAD_HEALTH',  # DF
        'ClusterCreationCompleted', 'AppInstalled', 'ClusterProvisioningFailed'  # DE
    ]

    # common regex patterns
    self.DATAHUB_NAME_PATTERN = re.compile(r'[^a-z0-9-]')
    self.DATALAKE_NAME_PATTERN = re.compile(r'[^a-z0-9-]')
    self.ENV_NAME_PATTERN = re.compile(r'(^[^a-z0-9]|[^a-z0-9-]|^.{,4}$|^.{29,}$)')
    self.CREDENTIAL_NAME_PATTERN = re.compile(r'[^a-z0-9-]')
    self.OPERATION_REGEX = re.compile(r'operation ([0-9a-zA-Z-]{36}) running')

    # Workload services with special credential and endpoint handling
    self.WORKLOAD_SERVICES = ['dfworkload']

    # substrings to check for in different CRNs
    self.CRN_STRINGS = {
        'generic': ['crn:'],
        'env': [':environments:', ':environment:'],
        'df': [':df:', ':service:'],
        'flow': [':df:', ':flow:'],
        'readyflow': [':df:', 'readyFlow'],
        'deployment': [':df:', ':deployment:']
    }
def test_load_service_data_no_dirs(self, *args):
    with patch.object(YAMLFileLoader, 'load_file') as mock_load_file:
        loader = Loader()
        with self.assertRaises(ValidationError):
            loader.load_service_data('service-1')
        self.assertEqual(0, mock_load_file.call_count)
def test_load_json_no_dirs(self, *args):
    with patch.object(JSONFileLoader, 'load_file') as mock_load_file:
        loader = Loader()
        with self.assertRaises(DataNotFoundError):
            loader.load_json('foo.json')
        self.assertEqual(0, mock_load_file.call_count)
def test_missing_builtin_service_alias(self, *args):
    '''Throws an exception because built-in service-1 is missing'''
    Loader()
def __init__(self, debug=False, tls_verify=False, strict_errors=False,
             tls_warnings=False, client_endpoint=None, cdp_credentials=None,
             error_handler=None, warning_handler=None, scrub_inputs=True):
    # Init Params
    self.debug = debug
    self.tls_verify = tls_verify
    self.strict_errors = strict_errors
    self.tls_warnings = tls_warnings
    self.client_endpoint = client_endpoint
    self.cdp_credentials = cdp_credentials
    self.scrub_inputs = scrub_inputs

    # Setup
    self.throw_error = error_handler if error_handler else self._default_throw_error
    self.throw_warning = warning_handler if warning_handler else self._default_throw_warning
    self._clients = {}
    self._PAGE_SIZE = 50
    _loader = Loader()
    _user_agent = self._make_user_agent_header()
    self._client_creator = ClientCreator(
        _loader, Context(), EndpointCreator(EndpointResolver()), _user_agent,
        ResponseParserFactory(),
        create_retry_handler(self._load_retry_config(_loader)))

    # Logging
    _log_format = '%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s'
    if debug:
        self._setup_logger(logging.DEBUG, _log_format)
        self.logger.debug("CDP SDK version: %s", _user_agent)
    else:
        self._setup_logger(logging.ERROR, _log_format)
    if self.tls_warnings is False:
        urllib3.disable_warnings(InsecureRequestWarning)

    # Warnings
    def _warning_format(message, category, filename, lineno, line=None):
        return ' %s:%s: %s:%s' % (filename, lineno, category.__name__, message)

    warnings.formatwarning = _warning_format

    # State listings
    # https://github.com/hortonworks/cloudbreak/blob/master/cluster-api/src/main/java/com/sequenceiq/
    # cloudbreak/cluster/status/ClusterStatus.java#L8-L18
    # https://github.com/hortonworks/cloudbreak/blob/master/core-api/src/main/java/com/sequenceiq/
    # cloudbreak/api/endpoint/v4/common/Status.java#L14-L53
    self.CREATION_STATES = [
        'REQUESTED',
        'EXTERNAL_DATABASE_CREATION_IN_PROGRESS',
        'STACK_CREATION_IN_PROGRESS',
        'CREATION_INITIATED',
        'FREEIPA_CREATION_IN_PROGRESS',
        'STARTING'
    ]
    self.TERMINATION_STATES = [
        'EXTERNAL_DATABASE_DELETION_IN_PROGRESS',
        'STACK_DELETION_IN_PROGRESS',
        'FREEIPA_DELETE_IN_PROGRESS',
        'STOPPING'
    ]
    self.STARTED_STATES = [
        'EXTERNAL_DATABASE_START_IN_PROGRESS',
        'AVAILABLE',
        'START_IN_PROGRESS',
        'RUNNING',
        'Running'
    ]
    self.STOPPED_STATES = [
        'EXTERNAL_DATABASE_STOP_IN_PROGRESS',
        'STOP_IN_PROGRESS',
        'STOPPED',
        'ENV_STOPPED'
    ]
    self.FAILED_STATES = [
        'PROVISIONING_FAILED',
        'CREATE_FAILED',
        'REJECTED',
        'FAILED',
        'TIMEDOUT',
        'DELETE_FAILED',
        'Error'
    ]
    self.REMOVABLE_STATES = [
        'AVAILABLE', 'UPDATE_FAILED', 'CREATE_FAILED', 'ENABLE_SECURITY_FAILED',
        'DELETE_FAILED', 'DELETE_COMPLETED', 'DELETED_ON_PROVIDER_SIDE',
        'STOPPED', 'START_FAILED', 'STOP_FAILED', 'Error', 'Running'
    ]

    # common regex patterns
    self.DATAHUB_NAME_PATTERN = re.compile(r'[^a-z0-9-]')
    self.DATALAKE_NAME_PATTERN = re.compile(r'[^a-z0-9-]')
    self.ENV_NAME_PATTERN = re.compile(
        r'(^[^a-z0-9]|[^a-z0-9-]|^.{,4}$|^.{29,}$)')
    self.CREDENTIAL_NAME_PATTERN = re.compile(r'[^a-z0-9-]')
    self.OPERATION_REGEX = re.compile(
        r'operation ([0-9a-zA-Z-]{36}) running')