def test_application_todict_dict_with_obj(self):
    """todict converts a namedtuple nested inside a dict into a plain dict."""
    record_type = namedtuple('MyObject', 'a b')
    record = record_type('x', 'y')
    converted = todict({'a': record})
    self.assertEqual(converted, {'a': {'a': 'x', 'b': 'y'}})
def connection_validate(cmd, client, connection_name=None, source_resource_group=None, source_id=None, indentifier=None, site=None, spring=None, app=None, deployment=None):
    """Validate an existing service connection (linker).

    Requires both --source-id and --connection; other parameters exist for
    argument-layer compatibility and are not read here.
    NOTE(review): parameter name 'indentifier' looks like a typo of
    'identifier' but is part of the public interface — renaming would break
    keyword callers.
    """
    import re
    from ._validators import get_resource_regex
    if not source_id or not connection_name:
        raise RequiredArgumentMissingError(
            err_msg.format('--source-id, --connection'))
    # HACK: get linker first to infer target resource type so that user token can be
    # set to work around OBO
    linker = todict(
        client.get(resource_uri=source_id, linker_name=connection_name))
    target_id = linker.get('targetId')
    # Match the linker's target against every known target resource pattern;
    # when the target type requires a user token, swap in a client that
    # carries the user token header.
    # NOTE(review): re.match would raise if target_id is None — presumably the
    # service always returns targetId; confirm.
    for resource_type, resource_id in TARGET_RESOURCES.items():
        matched = re.match(get_resource_regex(resource_id), target_id, re.IGNORECASE)
        if matched and resource_type in TARGET_RESOURCES_USERTOKEN:
            client = set_user_token_header(client, cmd.cli_ctx)
    return auto_register(client.begin_validate, resource_uri=source_id, linker_name=connection_name)
def get_directory_properties(client, timeout=None):
    """Return directory properties merged with its access-control entries."""
    from .._transformers import transform_fs_access_output
    properties = todict(client.get_directory_properties(timeout=timeout))
    access = transform_fs_access_output(
        client.get_access_control(timeout=timeout))
    # access-control keys overwrite any same-named property keys
    return {**properties, **access}
def transform_queue_policy_output(result):
    """Convert a queue access policy to a dict, parsing its date fields."""
    policy = todict(result)
    # 'start'/'expiry' arrive as strings; parse non-empty values into datetimes
    for field in ('start', 'expiry'):
        if policy[field]:
            policy[field] = parser.parse(policy[field])
    return policy
def _run_job(self, expanded_arg, cmd_copy):
    """Execute a single command job and return its transformed result.

    Runs cmd_copy with the filtered parameters, applies no-wait handling,
    any registered 'transform' op, LRO polling and paging materialization,
    then converts the result to a dict and raises the TRANSFORM event.
    On failure, delegates to the command's exception handler (returning a
    CommandResultItem with exit code 1) or re-raises.
    """
    params = self._filter_params(expanded_arg)
    try:
        result = cmd_copy(params)
        # --no-wait (generic or command-specific flag) discards the result
        if cmd_copy.supports_no_wait and getattr(expanded_arg, 'no_wait', False):
            result = None
        elif cmd_copy.no_wait_param and getattr(
                expanded_arg, cmd_copy.no_wait_param, False):
            result = None
        transform_op = cmd_copy.command_kwargs.get('transform', None)
        if transform_op:
            result = transform_op(result)
        # Poll long-running operations to completion; materialize paged results
        if _is_poller(result):
            result = LongRunningOperation(
                cmd_copy.cli_ctx, 'Starting {}'.format(cmd_copy.name))(result)
        elif _is_paged(result):
            result = list(result)
        result = todict(result, AzCliCommandInvoker.remove_additional_prop_layer)
        # Let event subscribers (e.g. query/output handlers) rewrite the result
        event_data = {'result': result}
        cmd_copy.cli_ctx.raise_event(EVENT_INVOKER_TRANSFORM_RESULT, event_data=event_data)
        return event_data['result']
    except Exception as ex:  # pylint: disable=broad-except
        if cmd_copy.exception_handler:
            cmd_copy.exception_handler(ex)
            return CommandResultItem(None, exit_code=1, error=ex)
        else:
            six.reraise(*sys.exc_info())
def create_key_vault_reference_connection_if_not_exist(cmd, client, source_id, key_vault_id):
    """Ensure a Key Vault connection exists for the source resource.

    Looks for an existing connection targeting key_vault_id; if none is
    valid (as decided by get_auth_if_no_valid_key_vault_connection), creates
    one with a generated name. Returns the create operation's result, or
    None when a valid connection already exists.
    """
    from ._validators import get_source_resource_name
    from knack.log import get_logger
    logger = get_logger(__name__)
    # Fix: log message previously misspelled 'vault' as 'vualt'
    logger.warning('get valid key vault reference connection')
    all_connections = todict(client.list(resource_uri=source_id))
    # keep only connections that point at the requested key vault
    key_vault_connections = [
        connection for connection in all_connections  # pylint: disable=not-an-iterable
        if connection.get('targetId') == key_vault_id
    ]
    source_name = get_source_resource_name(cmd)
    auth_info = get_auth_if_no_valid_key_vault_connection(logger, source_name, source_id, key_vault_connections)
    if not auth_info:
        # a valid connection already exists; nothing to create
        return
    # No Valid Key Vault Connection, Create
    logger.warning('no valid key vault connection found. Creating...')
    from ._resource_config import CLIENT_TYPE
    connection_name = generate_random_string(prefix='keyvault_')
    parameters = {
        'target_id': key_vault_id,
        'auth_info': auth_info,
        'client_type': CLIENT_TYPE.Dotnet,  # Key Vault Configuration are same across all client types
    }
    return auto_register(client.begin_create_or_update, resource_uri=source_id,
                         linker_name=connection_name, parameters=parameters)
def _run_job(self, expanded_arg, cmd_copy):
    """Execute one command job: run, apply no-wait/transform/LRO/paging,
    convert to dict, raise the TRANSFORM event, and return the result.

    On exception, calls the command's exception handler (if any) and returns
    a CommandResultItem with exit code 1; otherwise re-raises.
    """
    params = self._filter_params(expanded_arg)
    try:
        result = cmd_copy(params)
        # --no-wait (generic or command-specific flag) discards the result
        if cmd_copy.supports_no_wait and getattr(expanded_arg, 'no_wait', False):
            result = None
        elif cmd_copy.no_wait_param and getattr(expanded_arg, cmd_copy.no_wait_param, False):
            result = None
        transform_op = cmd_copy.command_kwargs.get('transform', None)
        if transform_op:
            result = transform_op(result)
        # Poll long-running operations; materialize paged iterables
        if _is_poller(result):
            result = LongRunningOperation(cmd_copy.cli_ctx, 'Starting {}'.format(cmd_copy.name))(result)
        elif _is_paged(result):
            result = list(result)
        result = todict(result, AzCliCommandInvoker.remove_additional_prop_layer)
        # Event subscribers may rewrite the result in place
        event_data = {'result': result}
        cmd_copy.cli_ctx.raise_event(EVENT_INVOKER_TRANSFORM_RESULT, event_data=event_data)
        return event_data['result']
    except Exception as ex:  # pylint: disable=broad-except
        if cmd_copy.exception_handler:
            cmd_copy.exception_handler(ex)
            return CommandResultItem(None, exit_code=1, error=ex)
        six.reraise(*sys.exc_info())
def connection_validate(cmd, client, connection_name=None, source_resource_group=None, source_id=None, indentifier=None, cluster=None, site=None, spring=None, app=None, deployment='default'):
    """Validate an existing service connection (linker).

    Requires both --source-id and --connection; the remaining parameters
    exist for argument-layer compatibility and are not read here.
    NOTE(review): 'indentifier' looks like a typo of 'identifier' but is part
    of the public interface — renaming would break keyword callers.
    """
    if not source_id or not connection_name:
        raise RequiredArgumentMissingError(
            err_msg.format('--source-id, --connection'))
    # HACK: get linker first to infer target resource type so that user token can be
    # set to work around OBO
    linker = todict(
        client.get(resource_uri=source_id, linker_name=connection_name))
    target_id = linker.get('targetService', dict()).get('id', '')
    target_type = get_resource_type_by_id(target_id)
    source_type = get_source_resource_name(cmd)
    # Attach a user token header when this source/target combination needs it
    client = set_user_token_by_source_and_target(client, cmd.cli_ctx, source_type, target_type)
    return auto_register(client.begin_validate, resource_uri=source_id, linker_name=connection_name)
def dump(obj):
    """Serialize *obj* as pretty-printed, sorted JSON and log it at info level."""
    serialized = dumps(todict(obj), ensure_ascii=False, indent=2,
                       sort_keys=True, separators=(',', ': '))
    logger.info(serialized + '\n')
def form_response(server_result, password, host, connection_string, database_name=None, firewall_id=None):
    """Build the CLI output dict for a created server.

    Adds connection string and password to the converted server result, plus
    firewall/database names when supplied. ('host' is kept for interface
    compatibility; it is not read here.)
    """
    response = todict(server_result)
    response['connectionString'] = connection_string
    response['password'] = password
    if firewall_id is not None:
        response['firewallName'] = firewall_id
    if database_name is not None:
        response['databaseName'] = database_name
    return response
def transform_message_output(result):
    """Rename queue-message timestamp fields to their legacy output names."""
    message = todict(result)
    renamed = {
        'expirationTime': message.pop('expiresOn', None),
        'insertionTime': message.pop('insertedOn', None),
        'timeNextVisible': message.pop('nextVisibleOn', None),
    }
    from azure.cli.core.commands.arm import make_camel_case
    # append the remaining fields, camel-cased, in sorted key order
    renamed.update({make_camel_case(key): message[key] for key in sorted(message)})
    return renamed
def list_role_assignments(cmd, assignee_object_id, scope=None):
    '''List role assignments for the given principal, resolving role and
    principal display names where possible.

    :param assignee_object_id: object id of the principal whose assignments
        are listed.
    :param scope: scope for role-definition lookup; defaults to the current
        subscription.
    '''
    graph_client = _graph_client_factory(cmd.cli_ctx)
    factory = _auth_client_factory(cmd.cli_ctx)
    assignments_client = factory.role_assignments
    definitions_client = factory.role_definitions
    assignments = _search_role_assignments(assignments_client, assignee_object_id)
    subscription_id = get_subscription_id(cmd.cli_ctx)
    results = todict(assignments) if assignments else []
    if not results:
        return []
    # 1. fill in logic names to get things understandable.
    # (it's possible that associated roles and principals were deleted, and we just do nothing.)
    # 2. fill in role names
    role_defs = list(
        definitions_client.list(scope=(scope if scope else '/subscriptions/' + subscription_id)))
    role_dics = {i.id: i.role_name for i in role_defs}
    for i in results:
        if role_dics.get(i['roleDefinitionId']):
            i['roleDefinitionName'] = role_dics[i['roleDefinitionId']]
    # fill in principal names
    principal_ids = set(i['principalId'] for i in results if i['principalId'])
    if principal_ids:
        try:
            principals = _get_object_stubs(graph_client, principal_ids)
            principal_dics = {
                i.object_id: _get_displayable_name(i)
                for i in principals
            }
            for i in [r for r in results if not r.get('principalName')]:
                i['principalName'] = ''
                if principal_dics.get(i['principalId']):
                    i['principalName'] = principal_dics[i['principalId']]
        except (HttpResponseError, GraphErrorException) as ex:
            # failure on resolving principal due to graph permission should not fail the whole thing
            logger.info(
                "Failed to resolve graph object information per error '%s'", ex)
    return results
def transform_share_directory_json_output(result):
    """Reshape a share-directory result, nesting property fields under 'properties'."""
    data = todict(result)
    reshaped = {
        "metadata": data.pop('metadata', None),
        "name": data.pop('name', None),
        "properties": {
            key: data.pop(key, None)
            for key in ('etag', 'lastModified', 'serverEncrypted')
        },
    }
    # carry over any remaining top-level fields unchanged
    reshaped.update(data)
    return reshaped
def generate_examples(self, keywords_list, output_format):
    """Generate JMESPath query string examples based on keyword_list.

    Collects examples from every node matching the keywords, truncates to the
    configured maximum (when non-negative), and — for table output — clamps
    each example's display widths. Returns the examples converted via todict.
    """
    examples = []
    match_list = self._get_matched_nodes(keywords_list)
    for node_name in match_list:
        if node_name:  # skip root node
            for node in self._all_nodes.get(node_name):
                examples.extend(node.get_examples())
    # a negative max_examples means "no limit"
    if self._config['max_examples'] >= 0:
        examples = examples[:self._config['max_examples']]
    if output_format == 'table':
        # constrain column widths for tabular rendering
        for item in examples:
            item.set_max_length(self._config['examples_len'], self._config['help_len'])
    return todict(examples)
def transform_file_share_json_output(result):
    """Reshape a file-share result, nesting property fields under 'properties'."""
    data = todict(result)
    reshaped = {
        "metadata": data.pop('metadata', None),
        "name": data.pop('name', None),
        "properties": {
            key: data.pop(key, None)
            for key in ('etag', 'lastModified', 'quota')
        },
        "snapshot": data.pop('snapshot', None),
    }
    # carry over any remaining top-level fields unchanged
    reshaped.update(data)
    return reshaped
def update_access_control_recursive(client, acl, **kwargs):
    """Recursively update ACLs, collecting per-batch failures into the result."""
    failures = []

    def _on_batch_done(acl_changes):
        # invoked after each completed batch; accumulate any failed entries
        if acl_changes.batch_failures:
            failures.extend(acl_changes.batch_failures)

    summary = todict(client.update_access_control_recursive(
        acl=acl, progress_hook=_on_batch_done, **kwargs))
    summary['failedEntries'] = failures
    return summary
def transform_container_json_output(result):
    """Reshape a container result, nesting property fields under 'properties'."""
    data = todict(result)
    reshaped = {
        "metadata": data.pop('metadata', None),
        "name": data.pop('name', None),
        "properties": {
            key: data.pop(key, None)
            for key in ('etag', 'hasImmutabilityPolicy', 'hasLegalHold',
                        'lastModified', 'lease', 'publicAccess')
        },
    }
    # carry over any remaining top-level fields unchanged
    reshaped.update(data)
    return reshaped
def _datashare_share_subscription_get_synchronization(cmd, client, resource_group_name, account_name, share_subscription_name, synchronization_id):
    """Return the synchronization with the given id, or None if absent."""
    from knack.util import todict
    from azure.cli.core.commands import AzCliCommandInvoker
    paged = client.list_synchronization(
        resource_group_name=resource_group_name,
        account_name=account_name,
        share_subscription_name=share_subscription_name)
    synchronizations = todict(list(paged), AzCliCommandInvoker.remove_additional_prop_layer)
    for item in synchronizations:
        if item['synchronizationId'] == synchronization_id:
            return item
    return None
def transform_blob_service_properties(result):
    """Reshape blob service properties into camelCase output keys, with the
    static-website error document renamed to 'errorDocument_404Path'."""
    from azure.cli.core.commands.arm import make_camel_case
    # NOTE(review): if 'static_website' is missing, todict(None) is subscripted
    # below — presumably the service always returns it; confirm.
    static_website = todict(result.pop("static_website", None))
    static_website["errorDocument_404Path"] = static_website.pop(
        "errorDocument404Path", None)
    new_result = {
        "cors": result.pop("cors", None),
        "deleteRetentionPolicy": result.pop("delete_retention_policy", None),
        "hourMetrics": result.pop("hour_metrics", None),
        "logging": result.pop("analytics_logging", None),
        "minuteMetrics": result.pop("minute_metrics", None),
        "staticWebsite": static_website
    }
    # camel-case any remaining keys and carry them over unchanged
    for key in result:
        new_result[make_camel_case(key)] = result[key]
    return new_result
def transform_share_file_json_output(result):
    """Reshape a share-file result, nesting property fields under 'properties'
    and renaming 'size' to 'contentLength'."""
    data = todict(result)
    reshaped = {
        "metadata": data.pop('metadata', None),
        "name": data.pop('name', None),
        "properties": {
            "etag": data.pop('etag', None),
            "lastModified": data.pop('lastModified', None),
            "serverEncrypted": data.pop('serverEncrypted', None),
            "contentLength": data.pop('size', None),
            "contentRange": data.pop('contentRange', None),
            "contentSettings": data.pop('contentSettings', None),
            "copy": data.pop("copy", None),
        },
    }
    # carry over any remaining top-level fields unchanged
    reshaped.update(data)
    return reshaped
def create_key_vault_reference_connection_if_not_exist(cmd, client, source_id, key_vault_id):
    """Ensure a Key Vault connection exists for the source resource.

    Scans existing connections for one targeting key_vault_id; if none is
    valid (as decided by get_auth_if_no_valid_key_vault_connection), creates
    one with a generated name. Kubernetes sources additionally get CSI-driver
    resource properties. Returns the create operation's result, or None when
    a valid connection already exists.
    """
    from ._validators import get_source_resource_name
    from knack.log import get_logger
    logger = get_logger(__name__)
    # Fix: log message previously misspelled 'vault' as 'vualt'
    logger.warning('get valid key vault reference connection')
    key_vault_connections = []
    for connection in client.list(resource_uri=source_id):
        connection = todict(connection)
        if connection.get('targetService', dict()).get('id') == key_vault_id:
            key_vault_connections.append(connection)
    source_name = get_source_resource_name(cmd)
    auth_info = get_auth_if_no_valid_key_vault_connection(
        logger, source_name, source_id, key_vault_connections)
    if not auth_info:
        # a valid connection already exists; nothing to create
        return
    # No Valid Key Vault Connection, Create
    logger.warning('no valid key vault connection found. Creating...')
    from ._resource_config import (RESOURCE, CLIENT_TYPE)
    connection_name = generate_random_string(prefix='keyvault_')
    parameters = {
        'target_service': {
            "type": "AzureResource",
            "id": key_vault_id
        },
        'auth_info': auth_info,
        'client_type': CLIENT_TYPE.Dotnet,  # Key Vault Configuration are same across all client types
    }
    if source_name == RESOURCE.KubernetesCluster:
        # AKS consumes the key vault through the CSI secrets-store driver
        parameters['target_service']['resource_properties'] = {
            'type': 'KeyVault',
            'connect_as_kubernetes_csi_driver': True,
        }
    return auto_register(client.begin_create_or_update, resource_uri=source_id,
                         linker_name=connection_name, parameters=parameters)
def transform_linker_properties(result):
    """Convert a linker result to a dict and attach its configuration list.

    Pollers are resolved first; configuration lookup failures are ignored
    (best effort).
    """
    from azure.core.polling import LROPoller
    from ._utils import (run_cli_cmd)
    # manually polling if result is a poller
    if isinstance(result, LROPoller):
        result = result.result()
    linker = todict(result)
    resource_id = linker.get('id')
    try:
        output = run_cli_cmd(
            'az webapp connection list-configuration --id {} -o json'.format(
                resource_id))
        linker['configurations'] = output.get('configurations')
    except CLIInternalError:
        # best effort: leave the linker without configurations
        pass
    return linker
def list_role_assignments(cmd, assignee_object_id, scope=None):
    '''List role assignments for the given principal, resolving role and
    principal display names where possible.

    :param assignee_object_id: object id of the principal whose assignments
        are listed.
    :param scope: scope for role-definition lookup; defaults to the current
        subscription.
    '''
    graph_client = _graph_client_factory(cmd.cli_ctx)
    factory = _auth_client_factory(cmd.cli_ctx)
    assignments_client = factory.role_assignments
    definitions_client = factory.role_definitions
    assignments = _search_role_assignments(assignments_client, assignee_object_id)
    results = todict(assignments) if assignments else []
    if not results:
        return []
    # 1. fill in logic names to get things understandable.
    # (it's possible that associated roles and principals were deleted, and we just do nothing.)
    # 2. fill in role names
    role_defs = list(definitions_client.list(
        scope=(scope if scope else '/subscriptions/' + definitions_client.config.subscription_id)))
    role_dics = {i.id: i.role_name for i in role_defs}
    for i in results:
        if role_dics.get(i['roleDefinitionId']):
            i['roleDefinitionName'] = role_dics[i['roleDefinitionId']]
    # fill in principal names
    principal_ids = set(i['principalId'] for i in results if i['principalId'])
    if principal_ids:
        try:
            principals = _get_object_stubs(graph_client, principal_ids)
            principal_dics = {i.object_id: _get_displayable_name(i) for i in principals}
            for i in [r for r in results if not r.get('principalName')]:
                i['principalName'] = ''
                if principal_dics.get(i['principalId']):
                    i['principalName'] = principal_dics[i['principalId']]
        except (CloudError, GraphErrorException) as ex:
            # failure on resolving principal due to graph permission should not fail the whole thing
            logger.info("Failed to resolve graph object information per error '%s'", ex)
    return results
def _to_dict(obj):
    """Recursively convert *obj* into ordered dicts, dropping empty values.

    ErrorResponse instances are first flattened with todict; dict keys are
    ordered so scalar values come before dicts and lists (each group sorted
    by key); None, [] and {} entries are omitted entirely.
    """
    if isinstance(obj, ErrorResponse):
        return _to_dict(todict(obj))
    if isinstance(obj, dict):
        ordered = OrderedDict()
        # complex (dict/list) values sort after scalars, then alphabetically
        for key in sorted(obj, key=lambda k: (isinstance(obj[k], dict),
                                              isinstance(obj[k], list), k)):
            value = obj[key]
            if value is None or value == [] or value == {}:
                continue
            ordered[key] = _to_dict(value)
        return ordered
    if isinstance(obj, list):
        return [_to_dict(item) for item in obj]
    return obj
def transform_blob_json_output(result):
    """Reshape a blob result into the legacy output layout: known fields are
    nested under 'properties', 'size' becomes 'contentLength', and content
    settings are expanded field-by-field (MD5 is re-encoded)."""
    result = todict(result)
    new_result = {
        "content": "",
        "deleted": result.pop('deleted', None),
        "metadata": result.pop('metadata', None),
        "name": result.pop('name', None),
        "properties": {
            "appendBlobCommittedBlockCount": result.pop('appendBlobCommittedBlockCount', None),
            "blobTier": result.pop('blobTier', None),
            "blobTierChangeTime": result.pop('blobTierChangeTime', None),
            "blobTierInferred": result.pop('blobTierInferred', None),
            "blobType": result.pop('blobType', None),
            "contentLength": result.pop('size', None),
            "contentRange": result.pop('contentRange', None),
            # contentSettings is read (not popped) here and deleted below,
            # because its fields are accessed individually
            "contentSettings": {
                "cacheControl": result['contentSettings']['cacheControl'],
                "contentDisposition": result['contentSettings']['contentDisposition'],
                "contentEncoding": result['contentSettings']['contentEncoding'],
                "contentLanguage": result['contentSettings']['contentLanguage'],
                "contentMd5": _encode_bytes(result['contentSettings']['contentMd5']),
                "contentType": result['contentSettings']['contentType']
            },
            "copy": result.pop('copy', None),
            "creationTime": result.pop('creationTime', None),
            "deletedTime": result.pop('deletedTime', None),
            "etag": result.pop('etag', None),
            "lastModified": result.pop('lastModified', None),
            "lease": result.pop('lease', None),
            "pageBlobSequenceNumber": result.pop('pageBlobSequenceNumber', None),
            "pageRanges": _transform_page_ranges(result.pop('pageRanges', None)),
            "rehydrationStatus": result.pop('archiveStatus', None),
            "remainingRetentionDays": result.pop('remainingRetentionDays', None),
            "serverEncrypted": result.pop('serverEncrypted', None)
        },
        "snapshot": result.pop('snapshot', None)
    }
    # drop the now-expanded contentSettings before merging leftovers
    del result['contentSettings']
    new_result.update(result)
    return new_result
def _run_job(self, expanded_arg, cmd_copy):
    """Execute one Graph command job and return its transformed result.

    Runs cmd_copy with the filtered parameters, applies no-wait handling,
    any registered 'transform' op and paging materialization, converts the
    result to a dict (stripping the additional-properties layer), re-encodes
    it through format_json to drop non-UTF8 characters, then raises the
    TRANSFORM event. 403 responses trigger self.handle_403 and are re-raised
    as CLIError; authentication failures go to self.handle_auth_error.
    """
    params = self._filter_params(expanded_arg)
    try:
        result = cmd_copy(params)
        # --no-wait (generic or command-specific flag) discards the result
        if cmd_copy.supports_no_wait and getattr(expanded_arg, 'no_wait', False):
            result = None
        elif cmd_copy.no_wait_param and getattr(
                expanded_arg, cmd_copy.no_wait_param, False):
            result = None
        transform_op = cmd_copy.command_kwargs.get('transform', None)
        if transform_op:
            result = transform_op(result)
        if _is_paged(result):
            result = list(result)
        result = todict(
            result, GraphCliCommandInvoker.remove_additional_prop_layer)
        # Formatting result so that non utf8 encoded characters are ignored.
        formatted_json = format_json({'result': result})
        result = json.loads(formatted_json)
        # Event subscribers may rewrite the result in place
        event_data = {'result': result}
        cmd_copy.cli_ctx.raise_event(EVENT_INVOKER_TRANSFORM_RESULT, event_data=event_data)
        return event_data['result']
    except Exception as ex:  # pylint: disable=broad-except
        if isinstance(ex, HttpResponseError):
            if ex.status_code == 403:  # pylint: disable=no-member
                self.handle_403()
                raise CLIError(ex.message) from ex  # pylint: disable=no-member
        if isinstance(ex, AuthenticationException):
            self.handle_auth_error(ex)
        if cmd_copy.exception_handler:
            cmd_copy.exception_handler(ex)
            return CommandResultItem(None, exit_code=1, error=ex)
        six.reraise(*sys.exc_info())
def execute(self, args):
    """Full command invocation pipeline: build/trim the command table, parse
    args, run each (possibly list-exploded) command, transform and filter
    results, and return a CommandResultItem.
    """
    from knack.events import (EVENT_INVOKER_PRE_CMD_TBL_CREATE, EVENT_INVOKER_POST_CMD_TBL_CREATE,
                              EVENT_INVOKER_CMD_TBL_LOADED, EVENT_INVOKER_PRE_PARSE_ARGS,
                              EVENT_INVOKER_POST_PARSE_ARGS, EVENT_INVOKER_TRANSFORM_RESULT,
                              EVENT_INVOKER_FILTER_RESULT)
    from knack.util import CommandResultItem, todict
    from azure.cli.core.commands.events import EVENT_INVOKER_PRE_CMD_TBL_TRUNCATE

    # TODO: Can't simply be invoked as an event because args are transformed
    args = _pre_command_table_create(self.cli_ctx, args)

    self.cli_ctx.raise_event(EVENT_INVOKER_PRE_CMD_TBL_CREATE, args=args)
    self.commands_loader.load_command_table(args)
    self.cli_ctx.raise_event(EVENT_INVOKER_PRE_CMD_TBL_TRUNCATE,
                             load_cmd_tbl_func=self.commands_loader.load_command_table, args=args)
    command = self._rudimentary_get_command(args)
    telemetry.set_raw_command_name(command)

    try:
        # exact command match: keep only that entry
        self.commands_loader.command_table = {command: self.commands_loader.command_table[command]}
    except KeyError:
        # Trim down the command table to reduce the number of subparsers required to optimize the performance.
        #
        # When given a command table like this:
        #
        # network application-gateway create
        # network application-gateway delete
        # network list-usages
        # storage account create
        # storage account list
        #
        # input:  az
        # output: network application-gateway create
        #         storage account create
        #
        # input:  az network
        # output: network application-gateway create
        #         network list-usages
        cmd_table = {}
        group_names = set()
        for cmd_name, cmd in self.commands_loader.command_table.items():
            if command and not cmd_name.startswith(command):
                continue
            cmd_stub = cmd_name[len(command):].strip()
            group_name = cmd_stub.split(' ', 1)[0]
            if group_name not in group_names:
                # keep one representative command per immediate child group
                cmd_table[cmd_name] = cmd
                group_names.add(group_name)
            self.commands_loader.command_table = cmd_table

    self.commands_loader.command_table = self.commands_loader.command_table  # update with the truncated table
    self.commands_loader.command_name = command
    self.commands_loader.load_arguments(command)

    self.cli_ctx.raise_event(EVENT_INVOKER_POST_CMD_TBL_CREATE, commands_loader=self.commands_loader)

    self.parser.cli_ctx = self.cli_ctx
    self.parser.load_command_table(self.commands_loader)

    self.cli_ctx.raise_event(EVENT_INVOKER_CMD_TBL_LOADED, cmd_tbl=self.commands_loader.command_table,
                             parser=self.parser)

    if not args:
        # bare 'az': show the welcome screen
        self.parser.enable_autocomplete()
        subparser = self.parser.subparsers[tuple()]
        self.help.show_welcome(subparser)

        # TODO: No event in base with which to target
        telemetry.set_command_details('az')
        telemetry.set_success(summary='welcome')
        return None

    if args[0].lower() == 'help':
        args[0] = '--help'

    self.parser.enable_autocomplete()

    self.cli_ctx.raise_event(EVENT_INVOKER_PRE_PARSE_ARGS, args=args)
    parsed_args = self.parser.parse_args(args)
    self.cli_ctx.raise_event(EVENT_INVOKER_POST_PARSE_ARGS, command=parsed_args.command, args=parsed_args)

    # TODO: This fundamentally alters the way Knack.invocation works here. Cannot be customized
    # with an event. Would need to be customized via inheritance.
    results = []
    for expanded_arg in _explode_list_args(parsed_args):
        cmd = expanded_arg.func
        if hasattr(expanded_arg, 'cmd'):
            expanded_arg.cmd = cmd

        self.cli_ctx.data['command'] = expanded_arg.command

        self._validation(expanded_arg)

        params = self._filter_params(expanded_arg)

        command_source = self.commands_loader.command_table[command].command_source

        extension_version = None
        extension_name = None
        try:
            if isinstance(command_source, ExtensionCommandSource):
                extension_name = command_source.extension_name
                extension_version = get_extension(command_source.extension_name).version
        except Exception:  # pylint: disable=broad-except
            pass

        telemetry.set_command_details(self.cli_ctx.data['command'], self.data['output'],
                                      [(p.split('=', 1)[0] if p.startswith('--') else p[:2]) for p in args if
                                       (p.startswith('-') and len(p) > 1)],
                                      extension_name=extension_name,
                                      extension_version=extension_version)
        if extension_name:
            self.data['command_extension_name'] = extension_name

        # collect argument-level and command-level deprecation notices
        deprecations = [] + getattr(expanded_arg, '_argument_deprecations', [])
        if cmd.deprecate_info:
            deprecations.append(cmd.deprecate_info)

        # search for implicit deprecation: walk up the command group hierarchy
        path_comps = cmd.name.split()[:-1]
        implicit_deprecate_info = None
        while path_comps and not implicit_deprecate_info:
            implicit_deprecate_info = resolve_deprecate_info(self.cli_ctx, ' '.join(path_comps))
            del path_comps[-1]

        if implicit_deprecate_info:
            deprecate_kwargs = implicit_deprecate_info.__dict__.copy()
            deprecate_kwargs['object_type'] = 'command'
            del deprecate_kwargs['_get_tag']
            del deprecate_kwargs['_get_message']
            deprecations.append(ImplicitDeprecated(**deprecate_kwargs))

        for d in deprecations:
            logger.warning(d.message)

        try:
            result = cmd(params)
            # --no-wait (generic or command-specific flag) discards the result
            if cmd.supports_no_wait and getattr(expanded_arg, 'no_wait', False):
                result = None
            elif cmd.no_wait_param and getattr(expanded_arg, cmd.no_wait_param, False):
                result = None

            transform_op = cmd.command_kwargs.get('transform', None)
            if transform_op:
                result = transform_op(result)

            # Poll long-running operations; materialize paged iterables
            if _is_poller(result):
                result = LongRunningOperation(self.cli_ctx, 'Starting {}'.format(cmd.name))(result)
            elif _is_paged(result):
                result = list(result)

            result = todict(result, AzCliCommandInvoker.remove_additional_prop_layer)
            event_data = {'result': result}
            self.cli_ctx.raise_event(EVENT_INVOKER_TRANSFORM_RESULT, event_data=event_data)
            result = event_data['result']
            results.append(result)

        except Exception as ex:  # pylint: disable=broad-except
            if cmd.exception_handler:
                cmd.exception_handler(ex)
                return None
            else:
                six.reraise(*sys.exc_info())

    # a single result is unwrapped from its list
    if results and len(results) == 1:
        results = results[0]

    event_data = {'result': results}
    self.cli_ctx.raise_event(EVENT_INVOKER_FILTER_RESULT, event_data=event_data)

    return CommandResultItem(
        event_data['result'],
        table_transformer=self.commands_loader.command_table[parsed_args.command].table_transformer,
        is_query_active=self.data['query_active'])
def list_role_assignments(cmd, assignee=None, role=None, resource_group_name=None, scope=None, include_inherited=False, show_all=False, include_groups=False, include_classic_administrators=False):
    '''List role assignments, optionally filtered by assignee/role/scope,
    resolving role and principal display names where possible.

    :param include_groups: include extra assignments to the groups of which
        the user is a member(transitively). Supported only for a user
        principal.
    :param show_all: list across the whole subscription; incompatible with
        --resource-group / --scope.
    :param include_classic_administrators: also backfill classic co-admin
        assignments.
    '''
    graph_client = _graph_client_factory(cmd.cli_ctx)
    factory = _auth_client_factory(cmd.cli_ctx, scope)
    assignments_client = factory.role_assignments
    definitions_client = factory.role_definitions

    if show_all:
        if resource_group_name or scope:
            raise CLIError(
                'group or scope are not required when --all is used')
        scope = None
    else:
        scope = _build_role_scope(resource_group_name, scope,
                                  definitions_client.config.subscription_id)

    assignments = _search_role_assignments(cmd.cli_ctx, assignments_client, definitions_client,
                                           scope, assignee, role, include_inherited, include_groups)

    results = todict(assignments) if assignments else []
    if include_classic_administrators:
        results += _backfill_assignments_for_co_admins(cmd.cli_ctx, factory, assignee)

    if not results:
        return []

    # 1. fill in logic names to get things understandable.
    # (it's possible that associated roles and principals were deleted, and we just do nothing.)
    # 2. fill in role names
    role_defs = list(
        definitions_client.list(
            scope=scope or ('/subscriptions/' + definitions_client.config.subscription_id)))
    role_dics = {i.id: i.properties.role_name for i in role_defs}
    for i in results:
        if role_dics.get(i['properties']['roleDefinitionId']):
            i['properties']['roleDefinitionName'] = role_dics[
                i['properties']['roleDefinitionId']]

    # fill in principal names
    principal_ids = set(i['properties']['principalId'] for i in results
                        if i['properties']['principalId'])
    if principal_ids:
        try:
            principals = _get_object_stubs(graph_client, principal_ids)
            principal_dics = {
                i.object_id: _get_displayable_name(i)
                for i in principals
            }
            for i in [
                    r for r in results
                    if not r['properties'].get('principalName')
            ]:
                i['properties']['principalName'] = ''
                if principal_dics.get(i['properties']['principalId']):
                    i['properties']['principalName'] = principal_dics[
                        i['properties']['principalId']]
        except (CloudError, GraphErrorException) as ex:
            # failure on resolving principal due to graph permission should not fail the whole thing
            logger.info(
                "Failed to resolve graph object information per error '%s'", ex)

    return results
def verify_property(instance, condition):
    """Evaluate the JMESPath *condition* against the dict form of *instance*."""
    from jmespath import compile as compile_jmespath
    converted = todict(instance)
    return compile_jmespath(condition).search(converted)
def generate_examples(self, keywords_list):  # pylint: disable=unused-argument, no-self-use
    """Example generation is not supported here; always return an empty list."""
    return todict([])
def execute(self, args):
    """Full command invocation pipeline (older variant): build/trim the
    command table, parse args, run each (possibly list-exploded) command,
    transform results, and return a CommandResultItem.
    """
    from knack.events import (
        EVENT_INVOKER_PRE_CMD_TBL_CREATE, EVENT_INVOKER_POST_CMD_TBL_CREATE,
        EVENT_INVOKER_CMD_TBL_LOADED, EVENT_INVOKER_PRE_PARSE_ARGS,
        EVENT_INVOKER_POST_PARSE_ARGS, EVENT_INVOKER_TRANSFORM_RESULT,
        EVENT_INVOKER_FILTER_RESULT)
    from knack.util import CommandResultItem, todict
    from azure.cli.core.commands.events import EVENT_INVOKER_PRE_CMD_TBL_TRUNCATE

    # TODO: Can't simply be invoked as an event because args are transformed
    args = _pre_command_table_create(self.cli_ctx, args)

    self.cli_ctx.raise_event(EVENT_INVOKER_PRE_CMD_TBL_CREATE, args=args)
    self.commands_loader.load_command_table(args)
    self.cli_ctx.raise_event(
        EVENT_INVOKER_PRE_CMD_TBL_TRUNCATE,
        load_cmd_tbl_func=self.commands_loader.load_command_table,
        args=args)
    command = self._rudimentary_get_command(args)
    telemetry.set_raw_command_name(command)

    try:
        # exact command match: keep only that entry
        self.commands_loader.command_table = {
            command: self.commands_loader.command_table[command]
        }
    except KeyError:
        # Trim down the command table to reduce the number of subparsers required to optimize the performance.
        #
        # When given a command table like this:
        #
        # network application-gateway create
        # network application-gateway delete
        # network list-usages
        # storage account create
        # storage account list
        #
        # input:  az
        # output: network application-gateway create
        #         storage account create
        #
        # input:  az network
        # output: network application-gateway create
        #         network list-usages
        cmd_table = {}
        group_names = set()
        for cmd_name, cmd in self.commands_loader.command_table.items():
            if command and not cmd_name.startswith(command):
                continue
            cmd_stub = cmd_name[len(command):].strip()
            group_name = cmd_stub.split(' ', 1)[0]
            if group_name not in group_names:
                # keep one representative command per immediate child group
                cmd_table[cmd_name] = cmd
                group_names.add(group_name)
            self.commands_loader.command_table = cmd_table

    self.commands_loader.command_table = self.commands_loader.command_table  # update with the truncated table
    self.commands_loader.command_name = command
    self.commands_loader.load_arguments(command)

    self.cli_ctx.raise_event(EVENT_INVOKER_POST_CMD_TBL_CREATE,
                             cmd_tbl=self.commands_loader.command_table)

    self.parser.cli_ctx = self.cli_ctx
    self.parser.load_command_table(self.commands_loader.command_table)

    self.cli_ctx.raise_event(EVENT_INVOKER_CMD_TBL_LOADED,
                             cmd_tbl=self.commands_loader.command_table,
                             parser=self.parser)

    if not args:
        # bare 'az': show the welcome screen
        self.cli_ctx.completion.enable_autocomplete(self.parser)
        subparser = self.parser.subparsers[tuple()]
        self.help.show_welcome(subparser)

        # TODO: No event in base with which to target
        telemetry.set_command_details('az')
        telemetry.set_success(summary='welcome')
        return None

    if args[0].lower() == 'help':
        args[0] = '--help'

    self.cli_ctx.completion.enable_autocomplete(self.parser)

    self.cli_ctx.raise_event(EVENT_INVOKER_PRE_PARSE_ARGS, args=args)
    parsed_args = self.parser.parse_args(args)
    self.cli_ctx.raise_event(EVENT_INVOKER_POST_PARSE_ARGS,
                             command=parsed_args.command,
                             args=parsed_args)

    # TODO: This fundamentally alters the way Knack.invocation works here. Cannot be customized
    # with an event. Would need to be customized via inheritance.
    results = []
    for expanded_arg in _explode_list_args(parsed_args):
        cmd = expanded_arg.func
        if hasattr(expanded_arg, 'cmd'):
            expanded_arg.cmd = cmd

        self.cli_ctx.data['command'] = expanded_arg.command

        self._validation(expanded_arg)

        params = self._filter_params(expanded_arg)

        command_source = self.commands_loader.command_table[
            command].command_source

        extension_version = None
        try:
            if command_source:
                extension_version = get_extension(
                    command_source.extension_name).version
        except Exception:  # pylint: disable=broad-except
            pass

        telemetry.set_command_details(
            self.cli_ctx.data['command'], self.data['output'],
            [(p.split('=', 1)[0] if p.startswith('--') else p[:2])
             for p in args if (p.startswith('-') and len(p) > 1)],
            extension_name=command_source.extension_name
            if command_source else None,
            extension_version=extension_version)
        if command_source:
            self.data[
                'command_extension_name'] = command_source.extension_name

        try:
            result = cmd(params)
            # --no-wait (generic or command-specific flag) discards the result
            if cmd.supports_no_wait and getattr(expanded_arg, 'no_wait',
                                                False):
                result = None
            elif cmd.no_wait_param and getattr(expanded_arg,
                                               cmd.no_wait_param, False):
                result = None

            # TODO: Not sure how to make this actually work with the TRANSFORM event...
            transform_op = cmd.command_kwargs.get('transform', None)
            if transform_op:
                result = transform_op(result)

            # Poll long-running operations; materialize paged iterables
            if _is_poller(result):
                result = LongRunningOperation(
                    self.cli_ctx, 'Starting {}'.format(cmd.name))(result)
            elif _is_paged(result):
                result = list(result)

            result = todict(result)
            event_data = {'result': result}
            self.cli_ctx.raise_event(EVENT_INVOKER_TRANSFORM_RESULT,
                                     event_data=event_data)
            self.cli_ctx.raise_event(EVENT_INVOKER_FILTER_RESULT,
                                     event_data=event_data)
            result = event_data['result']
            results.append(result)

        except Exception as ex:  # pylint: disable=broad-except
            if cmd.exception_handler:
                cmd.exception_handler(ex)
                return None
            else:
                six.reraise(*sys.exc_info())

    # a single result is unwrapped from its list
    if results and len(results) == 1:
        results = results[0]

    return CommandResultItem(
        results,
        table_transformer=self.commands_loader.command_table[
            parsed_args.command].table_transformer,
        is_query_active=self.data['query_active'])
def test_application_todict_dict_with_date(self):
    """todict renders a date as its ISO-8601 string."""
    sample = date(2017, 10, 13)
    self.assertEqual(todict(sample), sample.isoformat())
def test_application_todict_dict_with_time(self):
    """todict renders a time as its ISO-8601 string."""
    sample = time(1, 23, 45)
    self.assertEqual(todict(sample), sample.isoformat())