def run(self): """ Converges the external PSC into the Management Node without shutting down the Platform Services Controller. """ session = get_unverified_session() if self.skipverification else None sec_ctx = create_user_password_security_context( self.username, self.password) # TODO The following line to be deleted when API is changed to # @Release type. As of now this is only for testing app_ctx = ApplicationContext({SHOW_UNRELEASED_APIS: "True"}) connector = get_requests_connector( session=session, msg_protocol='json', url='https://{0}:5480/api'.format(self.server), provider_filter_chain=[ LegacySecurityContextFilter( security_context=sec_ctx)]) connector.set_application_context(app_ctx) stub_config = StubConfigurationFactory.new_std_configuration(connector) deployment_type = DeploymentType(stub_config) """ Running convergence task precheck. Remove the line ", only_precheck = True" to perform convergence. """ convergence_task = deployment_type.convert_to_vcsa_embedded_task( DeploymentType.ConvergenceSpec(DeploymentType.PscInfo( sso_admin_username=self.sso_admin_username, sso_admin_password=self.sso_admin_password), only_precheck=True)) print('Converge operation started with task ID: \n{0}'.format( convergence_task.get_task_id()))
def run(self): """ Decommissions a PSC node from a Management Node """ session = get_unverified_session() if self.skipverification else None sec_ctx = create_user_password_security_context( self.username, self.password) # TODO The following line to be deleted when API is changed to # @Release type. As of now this is only for testing app_ctx = ApplicationContext({SHOW_UNRELEASED_APIS: "True"}) connector = get_requests_connector(session=session, msg_protocol='json', url='https://{0}:5480/api'.format( self.server)) connector.set_security_context(sec_ctx) connector.set_application_context(app_ctx) stub_config = StubConfigurationFactory.new_std_configuration(connector) pscs_obj = Pscs(stub_config) """ Running decommission task precheck. Remove the line ", only_precheck = True" to perform decommission. """ decommission_task = pscs_obj.decommission_task( self.psc_hostname, Pscs.DecommissionSpec(sso_admin_username=self.sso_admin_username, sso_admin_password=self.sso_admin_password), only_precheck=True) print('Decommission operation started with task ID: \n%s', decommission_task.get_task_id())
def test_op(self):
    ctx = ExecutionContext(ApplicationContext(), None)
    input_val = VoidValue()
    method_result = self.sec_ctx_filter.invoke('svc', 'op1', input_val, ctx)
    self.assertEqual(method_result.output, IntegerValue(10))
    self.assertEqual(method_result.error, None)
def test_noauth_op_unknown_op(self):
    sec_ctx = None
    app_ctx = ApplicationContext()
    ctx = ExecutionContext(app_ctx, sec_ctx)
    input_val = VoidValue()
    method_result = self.authz_filter.invoke('com', 'op', input_val, ctx)
    self.assertEqual(method_result.output, IntegerValue(10))
    self.assertEqual(method_result.error, None)
def create_default_application_context():
    """
    Create a default application context. The created context
    will only have an opId.

    :rtype: :class:`vmware.vapi.core.ApplicationContext`
    :return: Newly created application context
    """
    return ApplicationContext({OPID: create_operation_id()})
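# Minimal usage sketch for the helper above. The module paths below are an
# assumption based on the usual vAPI Python runtime layout and are not shown
# in the snippets in this section; the pairing of an application context with
# a security context inside an ExecutionContext mirrors the filter tests below.
from vmware.vapi.core import ExecutionContext
from vmware.vapi.lib.context import create_default_application_context
from vmware.vapi.security.user_password import \
    create_user_password_security_context

app_ctx = create_default_application_context()   # carries only an opId
sec_ctx = create_user_password_security_context('testuser', 'password')

# The same (application context, security context) pair that the filter
# tests below pass to invoke().
ctx = ExecutionContext(app_ctx, sec_ctx)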
def app_ctx(ctx):
    """
    Build an application context from a JSON-RPC dict.

    :type ctx: :class:`dict`
    :param ctx: JSON application context
    :rtype: :class:`vmware.vapi.core.ApplicationContext`
    :return: Application context
    """
    return ApplicationContext(ctx)
def test_invalid_user_pwd_scheme(self):
    sec_ctx = SecurityContext({
        SCHEME_ID: OAUTH_SCHEME_ID,
        ACCESS_TOKEN: 'token'
    })
    app_ctx = ApplicationContext()
    ctx = ExecutionContext(app_ctx, sec_ctx)
    input_val = VoidValue()
    method_result = self.authn_filter.invoke('com.pkg.svc', 'op',
                                             input_val, ctx)
    self.assertEqual(method_result.output, IntegerValue(10))
    self.assertEqual(method_result.error, None)
def test_invalid_user_pwd(self):
    sec_ctx = SecurityContext({
        SCHEME_ID: USER_PASSWORD_SCHEME_ID,
        USER_KEY: 'testuser',
        PASSWORD_KEY: 'invalidpassword'
    })
    app_ctx = ApplicationContext()
    ctx = ExecutionContext(app_ctx, sec_ctx)
    input_val = VoidValue()
    method_result = self.authn_filter.invoke('com.pkg.svc', 'op',
                                             input_val, ctx)
    self.assertEqual(method_result.error.name,
                     'com.vmware.vapi.std.errors.unauthenticated')
def test_user_pwd_scheme(self):
    sec_ctx = SecurityContext({
        SCHEME_ID: USER_PASSWORD_SCHEME_ID,
        USER_KEY: 'testuser',
        PASSWORD_KEY: 'password'
    })
    app_ctx = ApplicationContext()
    ctx = ExecutionContext(app_ctx, sec_ctx)
    input_val = VoidValue()
    method_result = self.authn_filter.invoke('com.pkg.svc', 'op',
                                             input_val, ctx)
    self.assertEqual(method_result.output, IntegerValue(10))
    self.assertEqual(method_result.error, None)
def test_op_with_sec_ctx_on_filter(self):
    sec_ctx = SecurityContext({
        SCHEME_ID: USER_PASSWORD_SCHEME_ID,
        USER_KEY: 'testuser',
        PASSWORD_KEY: 'password'
    })
    self.sec_ctx_filter.set_security_context(sec_ctx)
    ctx = ExecutionContext(ApplicationContext(), None)
    input_val = VoidValue()
    method_result = self.sec_ctx_filter.invoke('svc', 'op1', input_val, ctx)
    self.assertEqual(method_result.output, IntegerValue(10))
    self.assertEqual(method_result.error, None)
def test_noauth_op_with_valid_user(self):
    sec_ctx = SecurityContext({
        SCHEME_ID: USER_PASSWORD_SCHEME_ID,
        USER_KEY: 'testuser',
        PASSWORD_KEY: 'password',
        AUTHN_IDENTITY: UserIdentity('testuser')
    })
    app_ctx = ApplicationContext()
    ctx = ExecutionContext(app_ctx, sec_ctx)
    input_val = VoidValue()
    method_result = self.authz_filter.invoke('com.pkg.svc', 'op1',
                                             input_val, ctx)
    self.assertEqual(method_result.output, IntegerValue(10))
    self.assertEqual(method_result.error, None)
def test_check_application_context(self):
    # Invoke an operation that checks for application context in 10 threads
    input_ = StructValue(application_context_method_name)
    num_threads = 10
    results = [None] * num_threads
    threads = [None] * num_threads
    for i in range(num_threads):
        threads[i] = threading.Thread(
            target=self._run_method_in_thread,
            args=(results, i, self.provider, interface_name,
                  application_context_method_name, input_,
                  ExecutionContext(
                      application_context=ApplicationContext(
                          {'thread_no': str(i)}))))
        threads[i].start()
    for i in range(num_threads):
        threads[i].join()
        method_result = results[i]
        self.assertTrue(method_result.success())
        # Each thread's output must echo back its own 'thread_no'
        self.assertEqual(method_result.output.value, str(i))
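# The test above relies on a _run_method_in_thread helper that is not shown
# in this section. A plausible sketch follows: the helper name and argument
# order are taken from the call site above, but the body is an assumption
# about how each worker thread records its result.
def _run_method_in_thread(self, results, index, provider, service_id,
                          operation_id, input_value, ctx):
    # Invoke the operation with this thread's ExecutionContext (whose
    # ApplicationContext carries the thread's 'thread_no') and store the
    # MethodResult so the main thread can assert on it after join().
    results[index] = provider.invoke(service_id, operation_id,
                                     input_value, ctx)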
def main():
    # Get user input.
    metadata_api_url, rest_navigation_url, output_dir, verify, \
        enable_filtering, GENERATE_METAMODEL, SPECIFICATION, \
        GENERATE_UNIQUE_OP_IDS, TAG_SEPARATOR = connection.get_input_params()

    # Maps enumeration id to enumeration info
    enumeration_dict = {}
    # Maps structure_id to structure_info
    structure_dict = {}
    # Maps service_id to service_info
    service_dict = {}
    # Maps service url to service id
    service_urls_map = {}

    start = timeit.default_timer()
    print('Trying to connect ' + metadata_api_url)
    session = requests.session()
    session.verify = False
    connector = get_requests_connector(session, url=metadata_api_url)
    if not enable_filtering:
        connector.set_application_context(
            ApplicationContext({SHOW_UNRELEASED_APIS: "True"}))
    print('Connected to ' + metadata_api_url)

    component_svc = connection.get_component_service(connector)
    dict_processing.populate_dicts(
        component_svc, enumeration_dict, structure_dict, service_dict,
        service_urls_map, rest_navigation_url, GENERATE_METAMODEL)
    if enable_filtering:
        service_urls_map = dict_processing.get_service_urls_from_rest_navigation(
            rest_navigation_url, verify)
    http_error_map = utils.HttpErrorMap(component_svc)

    # package_dict_api holds the list of all service urls which come under /api
    package_dict_api, package_dict = dict_processing.add_service_urls_using_metamodel(
        service_urls_map, service_dict, rest_navigation_url)

    rest = RestUrlProcessing()
    api = ApiUrlProcessing()

    threads = []
    for package, service_urls in six.iteritems(package_dict):
        worker = threading.Thread(
            target=rest.process_service_urls,
            args=(package, service_urls, output_dir, structure_dict,
                  enumeration_dict, service_dict, service_urls_map,
                  http_error_map, rest_navigation_url, enable_filtering,
                  SPECIFICATION, GENERATE_UNIQUE_OP_IDS))
        worker.daemon = True
        worker.start()
        threads.append(worker)
    for package, service_urls in six.iteritems(package_dict_api):
        worker = threading.Thread(
            target=api.process_service_urls,
            args=(package, service_urls, output_dir, structure_dict,
                  enumeration_dict, service_dict, service_urls_map,
                  http_error_map, rest_navigation_url, enable_filtering,
                  SPECIFICATION, GENERATE_UNIQUE_OP_IDS))
        worker.daemon = True
        worker.start()
        threads.append(worker)
    for worker in threads:
        worker.join()

    # api.json contains the list of packages which is used by the UI to
    # dynamically populate the dropdown.
    api_files_list = []
    for name in list(package_dict.keys()):
        api_files_list.append("rest_" + name)
    for name in list(package_dict_api.keys()):
        api_files_list.append("api_" + name)
    api_files = {'files': api_files_list}
    utils.write_json_data_to_file(
        output_dir + os.path.sep + 'api.json', api_files)

    stop = timeit.default_timer()
    print('Generated swagger files at ' + output_dir + ' for ' +
          metadata_api_url + ' in ' + str(stop - start) + ' seconds')
def main():
    # Get user input.
    metadata_api_url, \
        rest_navigation_url, \
        output_dir, \
        verify, \
        show_unreleased_apis, \
        GENERATE_METAMODEL, \
        SPECIFICATION, \
        GENERATE_UNIQUE_OP_IDS, \
        TAG_SEPARATOR, \
        DEPRECATE_REST, \
        fetch_auth_metadata, \
        auto_rest_services = connection.get_input_params()

    # Maps enumeration id to enumeration info
    enumeration_dict = {}
    # Maps structure_id to structure_info
    structure_dict = {}
    # Maps service_id to service_info
    service_dict = {}
    # Maps service url to service id
    service_urls_map = {}
    rest_navigation_handler = RestNavigationHandler(rest_navigation_url)

    start = timeit.default_timer()
    print('Trying to connect ' + metadata_api_url)
    session = requests.session()
    session.verify = False
    connector = get_requests_connector(session, url=metadata_api_url)
    if show_unreleased_apis:
        connector.set_application_context(
            ApplicationContext({SHOW_UNRELEASED_APIS: "True"}))
    print('Connected to ' + metadata_api_url)

    component_svc = connection.get_component_service(connector)

    auth_navigator = None
    if fetch_auth_metadata:
        # Fetch authentication metadata and initialize the authentication
        # data navigator
        auth_component_svc = connection.get_authentication_component_service(
            connector)
        auth_dict = authentication_metadata_processing.get_authentication_dict(
            auth_component_svc)
        auth_navigator = AuthenticationDictNavigator(auth_dict)

    dict_processing.populate_dicts(
        component_svc, enumeration_dict, structure_dict, service_dict,
        service_urls_map, rest_navigation_url, GENERATE_METAMODEL)
    http_error_map = utils.HttpErrorMap(component_svc)

    deprecation_handler = None
    # package_dict_api holds the list of all service urls which come under /api
    # package_dict_deprecated holds the list of all service urls which come
    # under /rest, but are deprecated with /api
    # replacement_dict contains information about the deprecated /rest to
    # /api mappings
    package_dict_api, package_dict, package_dict_deprecated, replacement_dict = \
        dict_processing.add_service_urls_using_metamodel(
            service_urls_map, service_dict, rest_navigation_handler,
            auto_rest_services, DEPRECATE_REST)
    utils.combine_dicts_with_list_values(package_dict, package_dict_deprecated)
    if DEPRECATE_REST:
        deprecation_handler = RestDeprecationHandler(replacement_dict)

    rest = RestMetadataProcessor()
    api = ApiMetadataProcessor()

    rest_package_spec_dict = {}
    api_package_spec_dict = {}
    with futures.ThreadPoolExecutor() as executor:
        rest_package_future_dict = {
            package: executor.submit(
                rest.get_path_and_type_dicts, package, service_urls,
                structure_dict, enumeration_dict, service_dict,
                service_urls_map, http_error_map, rest_navigation_handler,
                show_unreleased_apis, SPECIFICATION, auth_navigator,
                deprecation_handler)
            for package, service_urls in six.iteritems(package_dict)
        }
        api_package_future_dict = {
            package: executor.submit(
                api.get_path_and_type_dicts, package, service_urls,
                structure_dict, enumeration_dict, service_dict,
                service_urls_map, http_error_map, show_unreleased_apis,
                SPECIFICATION, auth_navigator)
            for package, service_urls in six.iteritems(package_dict_api)
        }
        rest_package_spec_dict = {
            package: future.result()
            for package, future in six.iteritems(rest_package_future_dict)
        }
        api_package_spec_dict = {
            package: future.result()
            for package, future in six.iteritems(api_package_future_dict)
        }

    file_handler = FileOutputHandler(
        rest_package_spec_dict, api_package_spec_dict, output_dir,
        GENERATE_UNIQUE_OP_IDS, SPECIFICATION)
    file_handler.output_files()

    stop = timeit.default_timer()
    print('Generated swagger files at ' + output_dir + ' for ' +
          metadata_api_url + ' in ' + str(stop - start) + ' seconds')