def _load_public_certificate_file(self,
                                   client,
                                   resource_group,
                                   service,
                                   loaded_public_certificate_file=None,
                                   **_):
    if not loaded_public_certificate_file:
        return
    data = get_file_json(loaded_public_certificate_file)
    if not data:
        return
    if not data.get('loadedCertificates'):
        raise FileOperationError(
            "loadedCertificates must be provided in the json file")
    loaded_certificates = []
    for item in data['loadedCertificates']:
        if not item.get('certificateName') or not item.get('loadTrustStore'):
            raise FileOperationError(
                "certificateName, loadTrustStore must be provided in the json file"
            )
        certificate_resource = client.certificates.get(
            resource_group, service, item['certificateName'])
        loaded_certificates.append(
            models.LoadedCertificate(
                resource_id=certificate_resource.id,
                load_trust_store=item['loadTrustStore']))
    return loaded_certificates
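For reference, a minimal sketch of the JSON file this helper expects; the file name and certificate name below are illustrative only.

import json

# Write an illustrative loaded-certificates file in the shape checked above.
example = {
    "loadedCertificates": [
        {"certificateName": "my-cert", "loadTrustStore": True}
    ]
}
with open("loaded-certificates.json", "w", encoding="utf-8") as f:
    json.dump(example, f, indent=2)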
Example 2
def data_collection_rules_create(client,
                                 resource_group_name,
                                 data_collection_rule_name,
                                 rule_file,
                                 location=None,
                                 tags=None,
                                 description=None):
    from azure.cli.core.util import get_file_json
    from azure.cli.core.azclierror import FileOperationError, UnclassifiedUserFault
    body = {}
    body['location'] = location
    body['tags'] = tags
    body['description'] = description
    try:
        json_data = get_file_json(rule_file)
    except FileNotFoundError:
        raise FileOperationError("No such file: " + str(rule_file))
    except IsADirectoryError:
        raise FileOperationError("Is a directory: " + str(rule_file))
    except PermissionError:
        raise FileOperationError("Permission denied: " + str(rule_file))
    except OSError as e:
        raise UnclassifiedUserFault(e)
    # Support both a bare rule object and one wrapped in a top-level "properties" key.
    data = json_data.get('properties', json_data)
    for key in data:
        if key == 'dataSources':
            body['data_sources'] = {}
            for key_ds in data['dataSources']:
                if key_ds == 'performanceCounters':
                    body['data_sources']['performance_counters'] = data[
                        'dataSources']['performanceCounters']
                if key_ds == 'windowsEventLogs':
                    body['data_sources']['windows_event_logs'] = data[
                        'dataSources']['windowsEventLogs']
                if key_ds == 'syslog':
                    body['data_sources']['syslog'] = data['dataSources'][
                        'syslog']
                if key_ds == 'extensions':
                    body['data_sources']['extensions'] = data['dataSources'][
                        'extensions']
        if key == 'destinations':
            body['destinations'] = {}
            for key_de in data['destinations']:
                if key_de == 'logAnalytics':
                    body['destinations']['log_analytics'] = data[
                        'destinations']['logAnalytics']
                if key_de == 'azureMonitorMetrics':
                    body['destinations']['azure_monitor_metrics'] = data[
                        'destinations']['azureMonitorMetrics']
        if key == 'dataFlows':
            body['data_flows'] = data['dataFlows']
    return _data_collection_rules_create(
        client,
        resource_group_name=resource_group_name,
        data_collection_rule_name=data_collection_rule_name,
        body=body)
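For reference, a minimal sketch of a rule_file in the shape the function above reads; only the keys it copies are shown, and the empty values are placeholders for the Azure Monitor data collection rule schema.

import json

# Illustrative skeleton only; real rules need populated data sources, destinations and data flows.
example_rule = {
    "properties": {  # the function also accepts these keys at the top level, without the wrapper
        "dataSources": {
            "performanceCounters": [],
            "windowsEventLogs": [],
            "syslog": [],
            "extensions": []
        },
        "destinations": {
            "logAnalytics": [],
            "azureMonitorMetrics": {}
        },
        "dataFlows": []
    }
}
with open("rule.json", "w", encoding="utf-8") as f:
    json.dump(example_rule, f, indent=2)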
Example 3
def get_cmd_file_path():

    import winreg
    try:
        # Connecting to key in registry
        accessRegistry = winreg.ConnectRegistry(None,
                                                winreg.HKEY_LOCAL_MACHINE)

        # Get the path of Integration Runtime
        accessKey = winreg.OpenKey(
            accessRegistry,
            r"SOFTWARE\Microsoft\DataTransfer\DataManagementGateway\ConfigurationManager"
        )
        accessValue = winreg.QueryValueEx(accessKey, r"DiacmdPath")[0]

        return accessValue
    except FileNotFoundError:
        try:
            diaCmdPath = get_cmd_file_path_static()
            return diaCmdPath
        except FileNotFoundError as e:
            raise FileOperationError(
                "Failed: No installed IR found or installed IR is not present in Program Files. Please install Integration Runtime in default location and re-run this command"
            ) from e
        except IndexError as e:
            raise FileOperationError(
                "IR is not properly installed. Please re-install it and re-run this command"
            ) from e
Example 4
def validate_config_file_path(path, action):

    if not os.path.exists(path):
        raise InvalidArgumentValueError(f'Invalid config file path: {path}. Please provide a valid config file path.')

    # JSON file read and validation of value in action
    with open(path, "r", encoding=None) as f:
        configJson = json.loads(f.read())
    try:
        if not configJson['action'].strip().lower() == action:
            raise FileOperationError(f"The desired action in config file was invalid. Please use \"{action}\" for action property in config file")
    except KeyError as e:
        raise FileOperationError("Invalid schema of config file. Please ensure that this is a properly formatted config file.") from e
def file_related_exception_handler(ex):
    from azure.cli.core.azclierror import FileOperationError
    if isinstance(ex, FileNotFoundError):
        raise FileOperationError(ex, recommendation='Please check the file path.')
    if isinstance(ex, PermissionError):
        raise FileOperationError(ex,
                                 recommendation='Please make sure you have enough permissions on the file/directory.')
    if isinstance(ex, IsADirectoryError):
        raise FileOperationError(ex, recommendation='File is expected, not a directory.')
    if isinstance(ex, NotADirectoryError):
        raise FileOperationError(ex, recommendation='Directory is expected, not a file.')
    import sys
    from six import reraise
    reraise(*sys.exc_info())
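A minimal, illustrative use of the handler above: call it from inside an except block so sys.exc_info() still refers to the original error; the file name is made up.

try:
    with open("settings.json", encoding="utf-8") as f:
        content = f.read()
except OSError as ex:
    file_related_exception_handler(ex)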
Example 6
def run_bicep_command(args, auto_install=True, check_upgrade=True):
    installation_path = _get_bicep_installation_path(platform.system())
    installed = os.path.isfile(installation_path)

    if not installed:
        if auto_install:
            ensure_bicep_installation()
        else:
            raise FileOperationError(
                'Bicep CLI not found. Install it now by running "az bicep install".'
            )
    elif check_upgrade:
        with suppress(ClientRequestError):
            # Checking upgrade should ignore connection issues.
            # Users may continue using the current installed version.
            installed_version = _get_bicep_installed_version(installation_path)
            latest_release_tag = get_bicep_latest_release_tag()
            latest_version = _extract_semver(latest_release_tag)
            if installed_version and latest_version and semver.compare(
                    installed_version, latest_version) < 0:
                _logger.warning(
                    'A new Bicep release is available: %s. Upgrade now by running "az bicep upgrade".',
                    latest_release_tag,
                )

    return _run_command(installation_path, args)
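A minimal usage sketch for the helper above, assuming _run_command returns the process output; the template name main.bicep is illustrative.

# Compile a Bicep template to ARM JSON, installing the Bicep CLI first if needed.
print(run_bicep_command(["build", "main.bicep"]))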
Example 7
def migration_create_func(cmd,
                          client,
                          resource_group_name,
                          server_name,
                          properties,
                          migration_name=None):

    subscription_id = get_subscription_id(cmd.cli_ctx)
    properties_filepath = os.path.join(os.path.abspath(os.getcwd()),
                                       properties)
    if not os.path.exists(properties_filepath):
        raise FileOperationError(
            "Properties file does not exist in the given location")
    with open(properties_filepath, "r") as f:
        json_data = f.read()
    if migration_name is None:
        # Convert a UUID to a string of hex digits in standard form
        migration_name = str(uuid.uuid4())
    r = send_raw_request(
        cmd.cli_ctx, "put",
        "https://management.azure.com/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBforPostgreSQL/flexibleServers/{}/migrations/{}?api-version=2020-02-14-privatepreview"
        .format(subscription_id, resource_group_name, server_name,
                migration_name), None, None, json_data)

    return r.json()
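Example 8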
def migration_create_func(cmd,
                          client,
                          resource_group_name,
                          server_name,
                          properties,
                          migration_name=None):

    subscription_id = get_subscription_id(cmd.cli_ctx)
    properties_filepath = os.path.join(os.path.abspath(os.getcwd()),
                                       properties)
    if not os.path.exists(properties_filepath):
        raise FileOperationError(
            "Properties file does not exist in the given location")
    with open(properties_filepath, "r") as f:
        try:
            request_payload = json.load(f)
            # A missing or non-object "properties" key also counts as an invalid file.
            request_payload["properties"]["TriggerCutover"] = 'true'
            json_data = json.dumps(request_payload)
        except (ValueError, KeyError, TypeError) as err:
            logger.error(err)
            raise BadRequestError(
                "Invalid json file. Make sure that the json file content is properly formatted."
            ) from err
    if migration_name is None:
        # Convert a UUID to a string of hex digits in standard form
        migration_name = str(uuid.uuid4())
    r = send_raw_request(
        cmd.cli_ctx, "put",
        "https://management.azure.com/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBforPostgreSQL/flexibleServers/{}/migrations/{}?api-version=2020-02-14-privatepreview"
        .format(subscription_id, resource_group_name, server_name,
                migration_name), None, None, json_data)

    return r.json()
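For reference, the properties file read above must parse as JSON with a top-level "properties" object, since this variant writes TriggerCutover into it; the sketch below is a skeleton only and omits the actual migration properties required by the service.

import json

# Illustrative skeleton; real migration properties go inside the "properties" object.
with open("migration-properties.json", "w", encoding="utf-8") as f:
    json.dump({"properties": {}}, f, indent=2)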
Example 9
def _check_dotnet_available():
    """
    Will fail if dotnet cannot be executed on the system.
    """
    args = ["dotnet", "--version"]

    try:
        import subprocess
        result = subprocess.run(args, stdout=subprocess.PIPE, check=False)
    except FileNotFoundError as e:
        raise FileOperationError(
            "Could not find 'dotnet' on the system.") from e

    if result.returncode != 0:
        raise FileOperationError(
            f"Failed to run 'dotnet'. (Error {result.returncode})")
Example 10
def aro_list_admin_credentials(cmd,
                               client,
                               resource_group_name,
                               resource_name,
                               file="kubeconfig"):
    # check for the presence of the feature flag and warn
    # the check shouldn't block the API call - ARM can cache a feature state for several minutes
    feature_client = get_mgmt_service_client(
        cmd.cli_ctx, ResourceType.MGMT_RESOURCE_FEATURES)
    feature = feature_client.features.get(
        resource_provider_namespace="Microsoft.RedHatOpenShift",
        feature_name="AdminKubeconfig")
    accepted_states = ["Registered", "Registering"]
    if feature.properties.state not in accepted_states:
        logger.warning(
            "This operation requires the Microsoft.RedHatOpenShift/AdminKubeconfig feature to be registered"
        )
        logger.warning(
            "To register run: az feature register --namespace Microsoft.RedHatOpenShift -n AdminKubeconfig"
        )
    query_result = client.list_admin_credentials(resource_group_name,
                                                 resource_name)
    file_mode = "x"
    yaml_data = b64decode(query_result.kubeconfig).decode('UTF-8')
    try:
        with open(file, file_mode, encoding="utf-8") as f:
            f.write(yaml_data)
    except FileExistsError as e:
        raise FileOperationError(f"File {file} already exists.") from e
    logger.info("Kubeconfig written to file: %s", file)
Example 11
def install_clusterctl(_cmd,
                       client_version="latest",
                       install_location=None,
                       source_url=None):
    """
    Install clusterctl, a command-line interface for Cluster API Kubernetes clusters.
    """

    if not source_url:
        source_url = "https://github.com/kubernetes-sigs/cluster-api/releases/"
        # TODO: mirror clusterctl binary to Azure China cloud--see install_kubectl().

    if client_version != "latest":
        source_url += "tags/"
    source_url += "{}/download/clusterctl-{}-amd64"

    file_url = ""
    system = platform.system()
    if system in ("Darwin", "Linux"):
        file_url = source_url.format(client_version, system.lower())
    else:  # TODO: support Windows someday?
        raise ValidationError(
            'The clusterctl binary is not available for "{}"'.format(system))

    # ensure installation directory exists
    if install_location is None:
        install_location = _get_default_install_location("clusterctl")
    install_dir, cli = os.path.dirname(install_location), os.path.basename(
        install_location)
    if not os.path.exists(install_dir):
        os.makedirs(install_dir)

    logger.warning('Downloading client to "%s" from "%s"', install_location,
                   file_url)
    try:
        urlretrieve(file_url, install_location)
        perms = (os.stat(install_location).st_mode | stat.S_IXUSR
                 | stat.S_IXGRP | stat.S_IXOTH)
        os.chmod(install_location, perms)
    except IOError as ex:
        err_msg = "Connection error while attempting to download client ({})".format(ex)
        raise FileOperationError(err_msg) from ex

    logger.warning(
        "Please ensure that %s is in your search PATH, so the `%s` command can be found.",
        install_dir,
        cli,
    )
Example 12
def download_binary(install_location, install_dir, file_url, system, cli):

    logger.info('Downloading client to "%s" from "%s"', install_location, file_url)
    try:
        urlretrieve(file_url, install_location)
        os.chmod(
            install_location,
            os.stat(install_location).st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        )
    except IOError as ex:
        err_msg = f"Connection error while attempting to download client ({ex})"
        raise FileOperationError(err_msg) from ex

    if system == "Windows":
        # be verbose, as the install_location is likely not in Windows's search PATHs
        env_paths = os.environ["PATH"].split(";")
        found = next(
            (x for x in env_paths if x.lower().rstrip("\\") == install_dir.lower()),
            None,
        )
        if not found:
            # pylint: disable=logging-format-interpolation
            logger.warning(
                'Please add "%s" to your search PATH so the `%s` can be found. 2 options: \n'
                '    1. Run "set PATH=%%PATH%%;%s" or "$env:path += \'%s\'" for PowerShell. '
                "This is good for the current command session.\n"
                "    2. Update system PATH environment variable by following "
                '"Control Panel->System->Advanced->Environment Variables", and re-open the command window. '
                "You only need to do it once",
                install_dir, cli, install_dir, install_dir,
            )
    else:
        if not which(cli):
            logger.warning(
                "Please ensure that %s is in your search PATH, so the `%s` command can be found.",
                install_dir,
                cli,
            )
    return install_location
Example 13
def import_zone(cmd, resource_group_name, private_zone_name, file_name):
    from azure.cli.core.util import read_file_content
    import sys
    from azure.mgmt.privatedns.models import RecordSet

    from azure.cli.core.azclierror import FileOperationError, UnclassifiedUserFault
    try:
        file_text = read_file_content(file_name)
    except FileNotFoundError:
        raise FileOperationError("No such file: " + str(file_name))
    except IsADirectoryError:
        raise FileOperationError("Is a directory: " + str(file_name))
    except PermissionError:
        raise FileOperationError("Permission denied: " + str(file_name))
    except OSError as e:
        raise UnclassifiedUserFault(e)

    zone_obj = parse_zone_file(file_text, private_zone_name)
    origin = private_zone_name
    record_sets = {}

    for record_set_name in zone_obj:
        for record_set_type in zone_obj[record_set_name]:
            record_set_obj = zone_obj[record_set_name][record_set_type]

            if record_set_type == 'soa':
                origin = record_set_name.rstrip('.')

            if not isinstance(record_set_obj, list):
                record_set_obj = [record_set_obj]

            for entry in record_set_obj:

                record_set_ttl = entry['ttl']
                record_set_key = '{}{}'.format(record_set_name.lower(),
                                               record_set_type)

                record = _build_record(cmd, entry)
                if not record:
                    logger.warning(
                        'Cannot import %s. RecordType is not found. Skipping...',
                        entry['delim'].lower())
                    continue

                record_set = record_sets.get(record_set_key, None)
                if not record_set:

                    # Workaround for issue #2824
                    relative_record_set_name = record_set_name.rstrip('.')
                    if not relative_record_set_name.endswith(origin):
                        logger.warning(
                            'Cannot import %s. Only records relative to origin may be '
                            'imported at this time. Skipping...',
                            relative_record_set_name)
                        continue

                    record_set = RecordSet(ttl=record_set_ttl)
                    record_sets[record_set_key] = record_set
                _privatedns_add_record(record_set,
                                       record,
                                       record_set_type,
                                       is_list=record_set_type.lower()
                                       not in ['soa', 'cname'])

    total_records = 0
    for key, rs in record_sets.items():
        rs_name, rs_type = key.lower().rsplit('.', 1)
        rs_name = rs_name[:-(len(origin) + 1)] if rs_name != origin else '@'
        try:
            record_count = len(
                getattr(rs, _privatedns_type_to_property_name(rs_type)))
        except TypeError:
            record_count = 1
        total_records += record_count
    cum_records = 0

    from azure.mgmt.privatedns import PrivateDnsManagementClient
    from azure.mgmt.privatedns.models import PrivateZone
    client = get_mgmt_service_client(cmd.cli_ctx, PrivateDnsManagementClient)

    print('== BEGINNING ZONE IMPORT: {} ==\n'.format(private_zone_name),
          file=sys.stderr)

    if private_zone_name.endswith(".local"):
        logger.warning((
            "Please be aware that DNS names ending with .local are reserved for use with multicast DNS "
            "and may not work as expected with some operating systems. For details refer to your operating systems documentation."
        ))
    zone = PrivateZone(location='global')
    result = LongRunningOperation(cmd.cli_ctx)(
        client.private_zones.create_or_update(resource_group_name,
                                              private_zone_name, zone))
    if result.provisioning_state != 'Succeeded':
        raise CLIError(
            'Error occurred while creating or updating private dns zone.')

    for key, rs in record_sets.items():

        rs_name, rs_type = key.lower().rsplit('.', 1)
        rs_name = '@' if rs_name == origin else rs_name
        if rs_name.endswith(origin):
            rs_name = rs_name[:-(len(origin) + 1)]

        try:
            record_count = len(
                getattr(rs, _privatedns_type_to_property_name(rs_type)))
        except TypeError:
            record_count = 1
        if rs_name == '@' and rs_type == 'soa':
            root_soa = client.record_sets.get(resource_group_name,
                                              private_zone_name, 'soa', '@')
            rs.soa_record.host = root_soa.soa_record.host
            rs_name = '@'
        try:
            client.record_sets.create_or_update(resource_group_name,
                                                private_zone_name, rs_type,
                                                rs_name, rs)
            cum_records += record_count
            print("({}/{}) Imported {} records of type '{}' and name '{}'".
                  format(cum_records, total_records, record_count, rs_type,
                         rs_name),
                  file=sys.stderr)
        except CloudError as ex:
            logger.error(ex)
    print("\n== {}/{} RECORDS IMPORTED SUCCESSFULLY: '{}' ==".format(
        cum_records, total_records, private_zone_name),
          file=sys.stderr)
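For reference, a minimal sketch of a zone file that parse_zone_file can consume for a zone named example.private; every name and value below is illustrative.

# Write an illustrative BIND-style zone file with an SOA, an A and a CNAME record.
zone_text = (
    "$ORIGIN example.private.\n"
    "$TTL 3600\n"
    "@    IN SOA ns1.example.private. hostmaster.example.private. (1 3600 300 604800 300)\n"
    "@    IN A     10.0.0.4\n"
    "www  IN CNAME example.private.\n"
)
with open("example.private.zone", "w", encoding="utf-8") as f:
    f.write(zone_text)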
Example 14
def storage_blob_download_batch(client, source, destination, container_name, pattern=None, dryrun=False,
                                progress_callback=None, socket_timeout=None, **kwargs):
    source_blobs = collect_blobs(client, container_name, pattern)
    blobs_to_download = {}
    for blob_name in source_blobs:
        # remove starting path separator and normalize
        normalized_blob_name = normalize_blob_file_path(None, blob_name)
        if normalized_blob_name in blobs_to_download:
            raise CLIError('Multiple blobs with download path: `{}`. As a solution, use the `--pattern` parameter '
                           'to select for a subset of blobs to download OR utilize the `storage blob download` '
                           'command instead to download individual blobs.'.format(normalized_blob_name))
        blobs_to_download[normalized_blob_name] = blob_name

    results = []
    if dryrun:
        # download_blobs = _blob_precondition_check(source_blobs, if_modified_since=if_modified_since,
        #                                           if_unmodified_since=if_unmodified_since)
        logger.warning('download action: from %s to %s', source, destination)
        logger.warning('    pattern %s', pattern)
        logger.warning('  container %s', container_name)
        logger.warning('      total %d', len(source_blobs))
        logger.warning(' operations')
        for b in source_blobs:
            logger.warning('  - %s', b)

    else:
        @check_precondition_success
        def _download_blob(*args, **kwargs):
            blob = download_blob(*args, **kwargs)
            return blob.name

        # Tell progress reporter to reuse the same hook
        if progress_callback:
            progress_callback.reuse = True

        for index, blob_normed in enumerate(blobs_to_download):
            from azure.cli.core.azclierror import FileOperationError
            # add blob name and number to progress message
            if progress_callback:
                progress_callback.message = '{}/{}: "{}"'.format(
                    index + 1, len(blobs_to_download), blobs_to_download[blob_normed])
            blob_client = client.get_blob_client(container=container_name,
                                                 blob=blobs_to_download[blob_normed])
            destination_path = os.path.join(destination, os.path.normpath(blob_normed))
            destination_folder = os.path.dirname(destination_path)
            # Fail when the destination path already exists as a file (e.g. a file/folder name conflict)
            if os.path.isfile(destination_path) and os.path.exists(destination_folder):
                raise FileOperationError("{} already exists in {}. Please rename the existing file or choose "
                                         "another destination folder.".format(blob_normed, destination))
            if not os.path.exists(destination_folder):
                mkdir_p(destination_folder)
            include, result = _download_blob(client=blob_client, file_path=destination_path,
                                             progress_callback=progress_callback, **kwargs)
            if include:
                results.append(result)

        # end progress hook
        if progress_callback:
            progress_callback.hook.end()
        num_failures = len(blobs_to_download) - len(results)
        if num_failures:
            logger.warning('%s of %s files not downloaded due to "Failed Precondition"',
                           num_failures, len(blobs_to_download))
    return results
Example 15
def install_kubectl(cmd, client_version="latest", install_location=None, source_url=None):
    """
    Install kubectl, a command-line interface for Kubernetes clusters.
    """

    if not source_url:
        source_url = "https://storage.googleapis.com/kubernetes-release/release"
        cloud_name = cmd.cli_ctx.cloud.name
        if cloud_name.lower() == "azurechinacloud":
            source_url = "https://mirror.azure.cn/kubernetes/kubectl"

    if client_version == "latest":
        context = ssl_context()
        version = urlopen(source_url + "/stable.txt", context=context).read()
        client_version = version.decode("UTF-8").strip()
    else:
        client_version = "v%s" % client_version

    file_url = ""
    system = platform.system()
    base_url = source_url + "/{}/bin/{}/amd64/{}"

    # ensure installation directory exists
    if install_location is None:
        install_location = _get_default_install_location("kubectl")
    install_dir, cli = os.path.dirname(install_location), os.path.basename(
        install_location
    )
    if not os.path.exists(install_dir):
        os.makedirs(install_dir)

    if system == "Windows":
        file_url = base_url.format(client_version, "windows", "kubectl.exe")
    elif system == "Linux":
        # TODO: Support ARM CPU here
        file_url = base_url.format(client_version, "linux", "kubectl")
    elif system == "Darwin":
        file_url = base_url.format(client_version, "darwin", "kubectl")
    else:
        raise InvalidArgumentValueError(
            'System "{}" is not supported by kubectl.'.format(system)
        )

    logger.info('Downloading client to "%s" from "%s"', install_location, file_url)
    try:
        urlretrieve(file_url, install_location)
        os.chmod(
            install_location,
            os.stat(install_location).st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        )
    except IOError as ex:
        err_msg = "Connection error while attempting to download client ({})".format(ex)
        raise FileOperationError(err_msg) from ex

    if system == "Windows":
        # be verbose, as the install_location is likely not in Windows's search PATHs
        env_paths = os.environ["PATH"].split(";")
        found = next(
            (x for x in env_paths if x.lower().rstrip("\\") == install_dir.lower()),
            None,
        )
        if not found:
            # pylint: disable=logging-format-interpolation
            logger.warning(
                'Please add "{0}" to your search PATH so the `{1}` can be found. 2 options: \n'
                '    1. Run "set PATH=%PATH%;{0}" or "$env:path += \'{0}\'" for PowerShell. '
                "This is good for the current command session.\n"
                "    2. Update system PATH environment variable by following "
                '"Control Panel->System->Advanced->Environment Variables", and re-open the command window. '
                "You only need to do it once".format(install_dir, cli)
            )
    else:
        if not which(cli):
            logger.warning(
                "Please ensure that %s is in your search PATH, so the `%s` command can be found.",
                install_dir,
                cli,
            )
Example 16
def install_kind(_cmd, client_version="v0.10.0", install_location=None, source_url=None):
    """
    Install kind, a container-based Kubernetes environment for development and testing.
    """

    if not source_url:
        source_url = "https://kind.sigs.k8s.io/dl/{}/kind-{}-amd64"

    # ensure installation directory exists
    if install_location is None:
        install_location = _get_default_install_location("kind")
    install_dir, cli = os.path.dirname(install_location), os.path.basename(
        install_location
    )
    if not os.path.exists(install_dir):
        os.makedirs(install_dir)

    file_url = ""
    system = platform.system()
    if system == "Windows":
        file_url = source_url.format(client_version, "windows")
    elif system == "Linux":
        file_url = source_url.format(client_version, "linux")
    elif system == "Darwin":
        file_url = source_url.format(client_version, "darwin")
    else:
        raise InvalidArgumentValueError('System "{}" is not supported by kind.'.format(system))

    logger.info('Downloading client to "%s" from "%s"', install_location, file_url)
    try:
        urlretrieve(file_url, install_location)
        os.chmod(
            install_location,
            os.stat(install_location).st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        )
    except IOError as ex:
        raise FileOperationError("Connection error while attempting to download client") from ex

    if system == "Windows":
        # be verbose, as the install_location is likely not in Windows's search PATHs
        env_paths = os.environ["PATH"].split(";")
        found = next(
            (x for x in env_paths if x.lower().rstrip("\\") == install_dir.lower()),
            None,
        )
        if not found:
            # pylint: disable=logging-format-interpolation
            logger.warning(
                'Please add "{0}" to your search PATH so the `{1}` can be found. 2 options: \n'
                '    1. Run "set PATH=%PATH%;{0}" or "$env:path += \'{0}\'" for PowerShell. '
                "This is good for the current command session.\n"
                "    2. Update system PATH environment variable by following "
                '"Control Panel->System->Advanced->Environment Variables", and re-open the command window. '
                "You only need to do it once".format(install_dir, cli)
            )
    else:
        if not which(cli):
            logger.warning(
                "Please ensure that %s is in your search PATH, so the `%s` command can be found.",
                install_dir,
                cli,
            )
    return install_location