    def validator(namespace):
        type_field_name = '{}_type'.format(property_name)
        property_val = getattr(namespace, property_name, None)
        parent_val = getattr(namespace, parent_name, None) if parent_name else None

        # Check for the different scenarios (order matters)
        # 1) provided value indicates None (pair of empty quotes)
        if property_val in ('', '""', "''") or (property_val is None and default_none):
            if not allow_none:
                raise CLIError('{} cannot be None.'.format(property_option))
            setattr(namespace, type_field_name, 'none')
            setattr(namespace, property_name, None)
            if parent_name and parent_val:
                logger = _logging.get_az_logger(__name__)
                logger.warning('Ignoring: %s %s', parent_option, parent_val)
                setattr(namespace, parent_name, None)
            return  # SUCCESS

        # Create a resource ID we can check for existence.
        (resource_id_parts, value_was_id) = _validate_name_or_id(
            namespace.resource_group_name, property_val, property_type, parent_val, parent_type)

        # 2) resource exists
        if resource_exists(**resource_id_parts):
            setattr(namespace, type_field_name, 'existingId')
            setattr(namespace, property_name, resource_id(**resource_id_parts))
            if parent_val:
                if value_was_id:
                    logger = _logging.get_az_logger(__name__)
                    logger.warning('Ignoring: %s %s', parent_option, parent_val)
                setattr(namespace, parent_name, None)
            return  # SUCCESS

        # if a parent name was required but not specified, raise a usage error
        if has_parent and not value_was_id and not parent_val and not allow_new:
            raise ValueError('incorrect usage: {0} ID | {0} NAME {1} NAME'.format(
                property_option, parent_option))

        # If a non-existent ID was supplied, raise an error depending on whether a new
        # resource can be created.
        if value_was_id:
            usage_message = '{} NAME'.format(property_option) if not has_parent \
                else '{} NAME [{} NAME]'.format(property_option, parent_option)
            action_message = 'Specify ( {} ) to create a new resource.'.format(usage_message) if \
                allow_new else 'Create the required resource and try again.'
            raise CLIError('{} {} does not exist. {}'.format(
                property_name, property_val, action_message))

        # 3) try to create new resource
        if allow_new:
            setattr(namespace, type_field_name, 'new')
        else:
            raise CLIError(
                '{} {} does not exist. Create the required resource and try again.'.format(
                    property_name, property_val))
Example #2
def storage_file_upload_batch(client,
                              destination,
                              source,
                              pattern=None,
                              dryrun=False,
                              validate_content=False,
                              content_settings=None,
                              max_connections=1,
                              metadata=None):
    """
    Upload local files to Azure Storage File Share in batch
    """

    from .files_helpers import glob_files_locally
    source_files = list(glob_files_locally(source, pattern))

    if dryrun:
        logger = get_az_logger(__name__)
        logger.warning('upload files to file share')
        logger.warning('    account %s', client.account_name)
        logger.warning('      share %s', destination)
        logger.warning('      total %d', len(source_files or []))
        logger.warning(' operations')
        for f in source_files or []:
            logger.warning('  - %s => %s', *f)

        return []

    # TODO: Performance improvement
    # 1. Avoid create directory repeatedly
    # 2. Upload files in parallel
    def _upload_action(source_pair):
        dir_name = os.path.dirname(source_pair[1])
        file_name = os.path.basename(source_pair[1])
        if dir_name:
            client.create_directory(share_name=destination,
                                    directory_name=dir_name,
                                    fail_on_exist=False)

        client.create_file_from_path(share_name=destination,
                                     directory_name=dir_name,
                                     file_name=file_name,
                                     local_file_path=source_pair[0],
                                     content_settings=content_settings,
                                     metadata=metadata,
                                     max_connections=max_connections,
                                     validate_content=validate_content)

        return client.make_file_url(destination, dir_name, file_name)

    return list(_upload_action(f) for f in source_files)
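A minimal usage sketch, assuming the legacy azure.storage.file SDK these helpers were written against; the account credentials, share name, and local path are placeholders:

from azure.storage.file import FileService

client = FileService(account_name='mystorageaccount', account_key='<key>')
urls = storage_file_upload_batch(client,
                                 destination='myshare',  # target file share
                                 source='./data',        # local directory to glob
                                 pattern='*.csv',        # optional glob pattern
                                 dryrun=True)            # only log the planned operations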
Example #3
def storage_file_download_batch(client,
                                source,
                                destination,
                                pattern=None,
                                dryrun=False,
                                validate_content=False,
                                max_connections=1):
    """
    Download files from file share to local directory in batch
    """

    from .files_helpers import glob_files_remotely, mkdir_p

    source_files = glob_files_remotely(client, source, pattern)

    if dryrun:
        source_files_list = list(source_files)

        logger = get_az_logger(__name__)
        logger.warning('download files from file share')
        logger.warning('    account %s', client.account_name)
        logger.warning('      share %s', source)
        logger.warning('destination %s', destination)
        logger.warning('    pattern %s', pattern)
        logger.warning('      total %d', len(source_files_list))
        logger.warning(' operations')
        for f in source_files_list:
            logger.warning('  - %s/%s => %s', f[0], f[1],
                           os.path.join(destination, *f))

        return []

    def _download_action(pair):
        destination_dir = os.path.join(destination, pair[0])
        mkdir_p(destination_dir)
        client.get_file_to_path(source,
                                directory_name=pair[0],
                                file_name=pair[1],
                                file_path=os.path.join(destination, *pair),
                                validate_content=validate_content,
                                max_connections=max_connections)
        return client.make_file_url(source, *pair)

    return list(_download_action(f) for f in source_files)
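A matching download sketch under the same assumptions (legacy azure.storage.file SDK, placeholder names); glob_files_remotely yields (directory, file_name) pairs, and each directory is recreated under the local destination before the file is downloaded:

from azure.storage.file import FileService

client = FileService(account_name='mystorageaccount', account_key='<key>')
downloaded_urls = storage_file_download_batch(client,
                                              source='myshare',     # share to read from
                                              destination='./out',  # local folder
                                              pattern='logs/*.txt',
                                              dryrun=False)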
Example #4
def storage_blob_download_batch(client,
                                source,
                                destination,
                                source_container_name,
                                pattern=None,
                                dryrun=False):
    """
    Download blobs in a container recursively

    :param str source:
        The source of this download operation. It can be either the container URL or the
        container name. When the source is a container URL, the storage account name will be
        parsed from the URL.

    :param str destination:
        The destination folder of this download operation. The folder must exist.

    :param bool dryrun:
        Show a summary of the operations to be taken instead of actually downloading the file(s).

    :param str pattern:
        The pattern used for file globbing. The supported patterns are '*', '?', '[seq]',
        and '[!seq]'.
    """
    from .util import collect_blobs
    source_blobs = collect_blobs(client, source_container_name, pattern)

    if dryrun:
        logger = get_az_logger(__name__)
        logger.warning('download action: from %s to %s', source, destination)
        logger.warning('    pattern %s', pattern)
        logger.warning('  container %s', source_container_name)
        logger.warning('      total %d', len(source_blobs))
        logger.warning(' operations')
        for b in source_blobs or []:
            logger.warning('  - %s', b)
        return []
    else:
        return list(
            _download_blob(client, source_container_name, destination, blob)
            for blob in source_blobs)
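A minimal usage sketch, assuming the legacy azure.storage.blob SDK; the account, container, and destination folder are placeholders, and the destination folder must already exist:

from azure.storage.blob import BlockBlobService

client = BlockBlobService(account_name='mystorageaccount', account_key='<key>')
storage_blob_download_batch(client,
                            source='mycontainer',        # shown in the dry-run log
                            destination='/tmp/backup',   # must already exist
                            source_container_name='mycontainer',
                            pattern='*.json',
                            dryrun=True)                 # only print the plan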
Example #5
def storage_file_upload_batch(client, destination, source, pattern=None, dryrun=False,
                              validate_content=False, content_settings=None, max_connections=1,
                              metadata=None):
    """
    Upload local files to Azure Storage File Share in batch
    """

    from .util import glob_files_locally
    source_files = list(glob_files_locally(source, pattern))

    if dryrun:
        logger = get_az_logger(__name__)
        logger.warning('upload files to file share')
        logger.warning('    account %s', client.account_name)
        logger.warning('      share %s', destination)
        logger.warning('      total %d', len(source_files or []))
        logger.warning(' operations')
        for f in source_files or []:
            logger.warning('  - %s => %s', *f)

        return []

    # TODO: Performance improvement
    # 1. Upload files in parallel
    def _upload_action(source_pair):
        dir_name = os.path.dirname(source_pair[1])
        file_name = os.path.basename(source_pair[1])

        _make_directory_in_files_share(client, destination, dir_name)
        client.create_file_from_path(share_name=destination,
                                     directory_name=dir_name,
                                     file_name=file_name,
                                     local_file_path=source_pair[0],
                                     content_settings=content_settings,
                                     metadata=metadata,
                                     max_connections=max_connections,
                                     validate_content=validate_content)

        return client.make_file_url(destination, dir_name, file_name)

    return list(_upload_action(f) for f in source_files)
Example #6
def storage_file_download_batch(client, source, destination, pattern=None, dryrun=False,
                                validate_content=False, max_connections=1):
    """
    Download files from file share to local directory in batch
    """

    from .util import glob_files_remotely, mkdir_p

    source_files = glob_files_remotely(client, source, pattern)

    if dryrun:
        source_files_list = list(source_files)

        logger = get_az_logger(__name__)
        logger.warning('download files from file share')
        logger.warning('    account %s', client.account_name)
        logger.warning('      share %s', source)
        logger.warning('destination %s', destination)
        logger.warning('    pattern %s', pattern)
        logger.warning('      total %d', len(source_files_list))
        logger.warning(' operations')
        for f in source_files_list:
            logger.warning('  - %s/%s => %s', f[0], f[1], os.path.join(destination, *f))

        return []

    def _download_action(pair):
        destination_dir = os.path.join(destination, pair[0])
        mkdir_p(destination_dir)
        client.get_file_to_path(source,
                                directory_name=pair[0],
                                file_name=pair[1],
                                file_path=os.path.join(destination, *pair),
                                validate_content=validate_content,
                                max_connections=max_connections)
        return client.make_file_url(source, *pair)

    return list(_download_action(f) for f in source_files)
Example #7
def storage_blob_download_batch(client, source, destination, source_container_name, pattern=None,
                                dryrun=False):
    """
    Download blobs in a container recursively

    :param str source:
        The source of this download operation. It can be either the container URL or the
        container name. When the source is a container URL, the storage account name will be
        parsed from the URL.

    :param str destination:
        The destination folder of this download operation. The folder must exist.

    :param bool dryrun:
        Show a summary of the operations to be taken instead of actually downloading the file(s).

    :param str pattern:
        The pattern used for file globbing. The supported patterns are '*', '?', '[seq]',
        and '[!seq]'.
    """
    from .util import collect_blobs
    source_blobs = collect_blobs(client, source_container_name, pattern)

    if dryrun:
        logger = get_az_logger(__name__)
        logger.warning('download action: from %s to %s', source, destination)
        logger.warning('    pattern %s', pattern)
        logger.warning('  container %s', source_container_name)
        logger.warning('      total %d', len(source_blobs))
        logger.warning(' operations')
        for b in source_blobs or []:
            logger.warning('  - %s', b)
        return []
    else:
        return list(_download_blob(client, source_container_name, destination, blob) for blob in
                    source_blobs)
Example #8
import azure.cli.core._logging as _logging

from azure.mgmt.authorization.models import (RoleAssignmentProperties, Permission, RoleDefinition,
                                             RoleDefinitionProperties)

from azure.graphrbac.models import (ApplicationCreateParameters,
                                    ApplicationUpdateParameters,
                                    PasswordCredential,
                                    KeyCredential,
                                    UserCreateParameters,
                                    PasswordProfile,
                                    ServicePrincipalCreateParameters)

from ._client_factory import _auth_client_factory, _graph_client_factory

logger = _logging.get_az_logger(__name__)

_CUSTOM_RULE = 'CustomRole'

def list_role_definitions(name=None, resource_group_name=None, scope=None,
                          custom_role_only=False):
    definitions_client = _auth_client_factory(scope).role_definitions
    scope = _build_role_scope(resource_group_name, scope,
                              definitions_client.config.subscription_id)
    return _search_role_definitions(definitions_client, name, scope, custom_role_only)

def get_role_definition_name_completion_list(prefix, **kwargs):  # pylint: disable=unused-argument
    definitions = list_role_definitions()
    return [x.properties.role_name for x in definitions]

def create_role_definition(role_definition):
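An illustrative call, assuming an authenticated Azure CLI session so that _auth_client_factory can build a client; the resource group name is a placeholder:

custom_definitions = list_role_definitions(resource_group_name='my-rg', custom_role_only=True)
for definition in custom_definitions:
    print(definition.properties.role_name)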
Example #9
import collections
import errno
import json
import os.path
from enum import Enum

import adal
import azure.cli.core._logging as _logging
from azure.cli.core._environment import get_config_dir
from azure.cli.core._session import ACCOUNT
from azure.cli.core._util import CLIError, get_file_json
from azure.cli.core.adal_authentication import AdalAuthentication
from azure.cli.core.cloud import get_cloud
from azure.cli.core.context import get_active_context

logger = _logging.get_az_logger(__name__)

# Names below are used by azure-xplat-cli to persist account information into
# ~/.azure/azureProfile.json, the OS X keychain, or Windows secure storage,
# which azure-cli will share.
# Please do not rename them unless you know what you are doing.
_IS_DEFAULT_SUBSCRIPTION = 'isDefault'
_SUBSCRIPTION_ID = 'id'
_SUBSCRIPTION_NAME = 'name'
_TENANT_ID = 'tenantId'
_USER_ENTITY = 'user'
_USER_NAME = 'name'
_SUBSCRIPTIONS = 'subscriptions'
_INSTALLATION_ID = 'installationId'
_ENVIRONMENT_NAME = 'environmentName'
_STATE = 'state'
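Roughly the shape of one subscription entry that these key names describe in ~/.azure/azureProfile.json, shown as a Python dict with made-up values (the exact file layout is an assumption, not taken from this module):

# Illustrative only; values are fake and the layout is an assumption.
example_subscription = {
    _SUBSCRIPTION_ID: '00000000-0000-0000-0000-000000000000',
    _SUBSCRIPTION_NAME: 'My Subscription',
    _STATE: 'Enabled',
    _TENANT_ID: '11111111-1111-1111-1111-111111111111',
    _ENVIRONMENT_NAME: 'AzureCloud',
    _IS_DEFAULT_SUBSCRIPTION: True,
    _USER_ENTITY: {_USER_NAME: 'user@example.com'},
}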
Example #10
    def test_get_az_logger_module(self):
        az_module_logger = _logging.get_az_logger('azure.cli.module')
        self.assertEqual(az_module_logger.name, 'az.azure.cli.module')
Example #11
    def test_get_az_logger(self):
        az_logger = _logging.get_az_logger()
        self.assertEqual(az_logger.name, 'az')
Example #12
def storage_blob_upload_batch(client,
                              source,
                              destination,
                              pattern=None,
                              source_files=None,
                              destination_container_name=None,
                              blob_type=None,
                              content_settings=None,
                              metadata=None,
                              validate_content=False,
                              maxsize_condition=None,
                              max_connections=1,
                              lease_id=None,
                              if_modified_since=None,
                              if_unmodified_since=None,
                              if_match=None,
                              if_none_match=None,
                              timeout=None,
                              dryrun=False):
    """
    Upload files to storage container as blobs

    :param str source:
        The directory containing the files to be uploaded.

    :param str destination:
        The destination of this upload operation. It can be either the container URL or the
        container name. When the destination is a container URL, the storage account name will
        be parsed from the URL.

    :param str pattern:
        The pattern used for file globbing. The supported patterns are '*', '?', '[seq]',
        and '[!seq]'.

    :param bool dryrun:
        Show a summary of the operations to be taken instead of actually uploading the file(s).

    :param string if_match:
        An ETag value, or the wildcard character (*). Specify this header to perform the operation
        only if the resource's ETag matches the value specified.

    :param string if_none_match:
        An ETag value, or the wildcard character (*). Specify this header to perform the
        operation only if the resource's ETag does not match the value specified. Specify the
        wildcard character (*) to perform the operation only if the resource does not exist,
        and fail the operation if it does exist.
    """
    def _append_blob(file_path, blob_name):
        if not client.exists(destination_container_name, blob_name):
            client.create_blob(container_name=destination_container_name,
                               blob_name=blob_name,
                               content_settings=content_settings,
                               metadata=metadata,
                               lease_id=lease_id,
                               if_modified_since=if_modified_since,
                               if_match=if_match,
                               if_none_match=if_none_match,
                               timeout=timeout)

        return client.append_blob_from_path(
            container_name=destination_container_name,
            blob_name=blob_name,
            file_path=file_path,
            progress_callback=lambda c, t: None,
            validate_content=validate_content,
            maxsize_condition=maxsize_condition,
            lease_id=lease_id,
            timeout=timeout)

    def _upload_blob(file_path, blob_name):
        return client.create_blob_from_path(
            container_name=destination_container_name,
            blob_name=blob_name,
            file_path=file_path,
            progress_callback=lambda c, t: None,
            content_settings=content_settings,
            metadata=metadata,
            validate_content=validate_content,
            max_connections=max_connections,
            lease_id=lease_id,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            if_match=if_match,
            if_none_match=if_none_match,
            timeout=timeout)

    upload_action = _upload_blob if blob_type in ('block', 'page') else _append_blob

    if dryrun:
        logger = get_az_logger(__name__)
        logger.warning('upload action: from %s to %s', source, destination)
        logger.warning('    pattern %s', pattern)
        logger.warning('  container %s', destination_container_name)
        logger.warning('       type %s', blob_type)
        logger.warning('      total %d', len(source_files or []))
        logger.warning(' operations')
        for f in source_files or []:
            logger.warning('  - %s => %s', *f)
    else:
        for f in source_files or []:
            print('uploading {}'.format(f[0]))
            upload_action(*f)
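A minimal usage sketch, assuming the legacy azure.storage.blob SDK and placeholder values; note that source_files is a list of (local_path, blob_name) pairs, matching how the dry-run log and upload_action(*f) consume them:

from azure.storage.blob import BlockBlobService

client = BlockBlobService(account_name='mystorageaccount', account_key='<key>')
storage_blob_upload_batch(client,
                          source='./site',
                          destination='mycontainer',
                          source_files=[('./site/index.html', 'index.html')],
                          destination_container_name='mycontainer',
                          blob_type='block',  # 'block'/'page' use _upload_blob, otherwise append
                          dryrun=True)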
Example #13
def storage_blob_download_batch(client,
                                source,
                                destination,
                                source_container_name,
                                pattern=None,
                                dryrun=False):
    """
    Download blobs in a container recursively

    :param str source:
        The source of this download operation. It can be either the container URL or the
        container name. When the source is a container URL, the storage account name will be
        parsed from the URL.

    :param str destination:
        The destination folder of this download operation. The folder must exist.

    :param bool dryrun:
        Show a summary of the operations to be taken instead of actually downloading the file(s).

    :param str pattern:
        The pattern used for file globbing. The supported patterns are '*', '?', '[seq]',
        and '[!seq]'.
    """
    import os.path
    from fnmatch import fnmatch

    def _pattern_has_wildcards(p):
        return not p or '*' in p or '?' in p or '[' in p

    source_blobs = []
    if not _pattern_has_wildcards(pattern):
        source_blobs.append(pattern)
    else:
        # IDEA:
        # 1. Listing is slow. It can be done in parallel with copying.
        # 2. Use the prefix parameter to reduce the returned blobs list size
        source_blobs = client.list_blobs(source_container_name)

        if pattern:
            source_blobs = [
                blob.name for blob in source_blobs
                if fnmatch(blob.name, pattern)
            ]
        else:
            source_blobs = [blob.name for blob in source_blobs]

        if not source_blobs:
            return []

    result = []
    if dryrun:
        logger = get_az_logger(__name__)
        logger.warning('download action: from %s to %s', source, destination)
        logger.warning('    pattern %s', pattern)
        logger.warning('  container %s', source_container_name)
        logger.warning('      total %d', len(source_blobs))
        logger.warning(' operations')
        for b in source_blobs or []:
            logger.warning('  - %s', b)
    else:
        # TODO: try catch IO exception
        for blob in source_blobs:
            dst = os.path.join(destination, blob)
            dst_folder = os.path.dirname(dst)
            if not os.path.exists(dst_folder):
                os.makedirs(dst_folder)

            result.append(
                client.get_blob_to_path(source_container_name, blob, dst))

    return result
Example #14
def storage_blob_upload_batch(client, source, destination, pattern=None, source_files=None,
                              destination_container_name=None, blob_type=None,
                              content_settings=None, metadata=None, validate_content=False,
                              maxsize_condition=None, max_connections=2, lease_id=None,
                              if_modified_since=None, if_unmodified_since=None, if_match=None,
                              if_none_match=None, timeout=None, dryrun=False):
    """
    Upload files to storage container as blobs

    :param str source:
        The directory containing the files to be uploaded.

    :param str destination:
        The destination of this upload operation. It can be either the container URL or the
        container name. When the destination is a container URL, the storage account name will
        be parsed from the URL.

    :param str pattern:
        The pattern used for file globbing. The supported patterns are '*', '?', '[seq]',
        and '[!seq]'.

    :param bool dryrun:
        Show a summary of the operations to be taken instead of actually uploading the file(s).

    :param string if_match:
        An ETag value, or the wildcard character (*). Specify this header to perform the operation
        only if the resource's ETag matches the value specified.

    :param string if_none_match:
        An ETag value, or the wildcard character (*). Specify this header to perform the
        operation only if the resource's ETag does not match the value specified. Specify the
        wildcard character (*) to perform the operation only if the resource does not exist,
        and fail the operation if it does exist.
    """
    def _append_blob(file_path, blob_name):
        if not client.exists(destination_container_name, blob_name):
            client.create_blob(
                container_name=destination_container_name,
                blob_name=blob_name,
                content_settings=content_settings,
                metadata=metadata,
                lease_id=lease_id,
                if_modified_since=if_modified_since,
                if_match=if_match,
                if_none_match=if_none_match,
                timeout=timeout)

        return client.append_blob_from_path(
            container_name=destination_container_name,
            blob_name=blob_name,
            file_path=file_path,
            progress_callback=lambda c, t: None,
            validate_content=validate_content,
            maxsize_condition=maxsize_condition,
            lease_id=lease_id,
            timeout=timeout)

    def _upload_blob(file_path, blob_name):
        return client.create_blob_from_path(
            container_name=destination_container_name,
            blob_name=blob_name,
            file_path=file_path,
            progress_callback=lambda c, t: None,
            content_settings=content_settings,
            metadata=metadata,
            validate_content=validate_content,
            max_connections=max_connections,
            lease_id=lease_id,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            if_match=if_match,
            if_none_match=if_none_match,
            timeout=timeout)

    upload_action = _upload_blob if blob_type in ('block', 'page') else _append_blob

    if dryrun:
        logger = get_az_logger(__name__)
        logger.warning('upload action: from %s to %s', source, destination)
        logger.warning('    pattern %s', pattern)
        logger.warning('  container %s', destination_container_name)
        logger.warning('       type %s', blob_type)
        logger.warning('      total %d', len(source_files or []))
        logger.warning(' operations')
        for f in source_files or []:
            logger.warning('  - %s => %s', *f)
    else:
        for f in source_files or []:
            print('uploading {}'.format(f[0]))
            upload_action(*f)
Example #15
def storage_file_copy_batch(client, source_client,
                            destination_share=None, destination_path=None,
                            source_container=None, source_share=None, source_sas=None,
                            pattern=None, dryrun=False, metadata=None, timeout=None):
    """
    Copy a group of files asynchronously
    """
    logger = None
    if dryrun:
        logger = get_az_logger(__name__)
        logger.warning('copy files or blobs to file share')
        logger.warning('    account %s', client.account_name)
        logger.warning('      share %s', destination_share)
        logger.warning('       path %s', destination_path)
        logger.warning('     source %s', source_container or source_share)
        logger.warning('source type %s', 'blob' if source_container else 'file')
        logger.warning('    pattern %s', pattern)
        logger.warning(' operations')

    if source_container:
        # copy blobs to file share

        # if the source client is None, recreate one from the destination client.
        source_client = source_client or create_blob_service_from_storage_client(client)

        # Cache of directories that already exist in the destination file share; it avoids
        # repeatedly creating the same directory and so improves performance.
        existing_dirs = set()

        def action_blob_copy(blob_name):
            if dryrun:
                logger.warning('  - copy blob %s', blob_name)
            else:
                _create_file_and_directory_from_blob(
                    client, source_client, destination_share, source_container, source_sas,
                    blob_name, destination_dir=destination_path, metadata=metadata, timeout=timeout,
                    existing_dirs=existing_dirs)

        return list(filter_none(action_blob_copy(blob) for blob in
                                collect_blobs(source_client, source_container, pattern)))

    elif source_share:
        # copy files from share to share

        # if the source client is None, assume the file share is in the same storage account as
        # destination, therefore client is reused.
        source_client = source_client or client

        # Cache of directories that already exist in the destination file share; it avoids
        # repeatedly creating the same directory and so improves performance.
        existing_dirs = set()

        def action_file_copy(file_info):
            dir_name, file_name = file_info
            if dryrun:
                logger.warning('  - copy file %s', os.path.join(dir_name, file_name))
            else:
                _create_file_and_directory_from_file(
                    client, source_client, destination_share, source_share, source_sas, dir_name,
                    file_name, destination_dir=destination_path, metadata=metadata,
                    timeout=timeout, existing_dirs=existing_dirs)

        return list(filter_none(action_file_copy(file) for file in
                                collect_files(source_client, source_share, pattern)))
    else:
        # won't happen, the validator should ensure either source_container or source_share is set
        raise ValueError('Failed to find a source. Neither a blob container nor a file share '
                         'was specified.')
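A minimal usage sketch, assuming the legacy azure.storage SDKs and placeholder account names, share, container, and SAS token; this is the blob-to-file-share direction (source_container set, source_share left unset):

from azure.storage.blob import BlockBlobService
from azure.storage.file import FileService

destination_client = FileService(account_name='destaccount', account_key='<dest-key>')
source_client = BlockBlobService(account_name='srcaccount', sas_token='<sas-token>')

storage_file_copy_batch(destination_client, source_client,
                        destination_share='myshare',
                        destination_path='backups',
                        source_container='mycontainer',
                        source_sas='<sas-token>',
                        pattern='*.vhd',
                        dryrun=True)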
Example #16
    def validator(namespace):
        type_field_name = '{}_type'.format(property_name)
        property_val = getattr(namespace, property_name, None)
        parent_val = getattr(namespace, parent_name,
                             None) if parent_name else None

        # Check for the different scenarios (order matters)
        # 1) provided value indicates None (pair of empty quotes)
        if property_val in ('', '""', "''") or (property_val is None
                                                and default_none):
            if not allow_none:
                raise CLIError('{} cannot be None.'.format(property_option))
            setattr(namespace, type_field_name, 'none')
            setattr(namespace, property_name, None)
            if parent_name and parent_val:
                logger = _logging.get_az_logger(__name__)
                logger.warning('Ignoring: %s %s', parent_option, parent_val)
                setattr(namespace, parent_name, None)
            return  # SUCCESS

        # Create a resource ID we can check for existence.
        (resource_id_parts,
         value_was_id) = _validate_name_or_id(namespace.resource_group_name,
                                              property_val, property_type,
                                              parent_val, parent_type)

        # 2) resource exists
        if resource_exists(**resource_id_parts):
            setattr(namespace, type_field_name, 'existingId')
            setattr(namespace, property_name, resource_id(**resource_id_parts))
            if parent_val:
                if value_was_id:
                    logger = _logging.get_az_logger(__name__)
                    logger.warning('Ignoring: %s %s', parent_option,
                                   parent_val)
                setattr(namespace, parent_name, None)
            return  # SUCCESS

        # if a parent name was required but not specified, raise a usage error
        if has_parent and not value_was_id and not parent_val and not allow_new:
            raise ValueError(
                'incorrect usage: {0} ID | {0} NAME {1} NAME'.format(
                    property_option, parent_option))

        # If a non-existent ID was supplied, raise an error depending on whether a new
        # resource can be created.
        if value_was_id:
            usage_message = '{} NAME'.format(property_option) if not has_parent \
                else '{} NAME [{} NAME]'.format(property_option, parent_option)
            action_message = 'Specify ( {} ) to create a new resource.'.format(usage_message) if \
                allow_new else 'Create the required resource and try again.'
            raise CLIError('{} {} does not exist. {}'.format(
                property_name, property_val, action_message))

        # 3) try to create new resource
        if allow_new:
            setattr(namespace, type_field_name, 'new')
        else:
            raise CLIError(
                '{} {} does not exist. Create the required resource and try again.'
                .format(property_name, property_val))