예제 #1
0
def upload(path,
           imagestore_string='fabric:ImageStore',
           show_progress=False,
           timeout=300):  # pylint: disable=too-many-locals,missing-docstring

    from sfctl.config import (client_endpoint, no_verify_setting, ca_cert_info,
                              cert_info)
    import requests

    abspath = validate_app_path(path)
    basename = os.path.basename(abspath)

    endpoint = client_endpoint()
    cert = cert_info()

    # SSL verification value handed to requests: False disables verification,
    # a CA bundle path pins the trust root, True uses the default store.
    if no_verify_setting():
        ca_cert = False
    elif ca_cert_info():
        ca_cert = ca_cert_info()
    else:
        ca_cert = True

    if all([no_verify_setting(), ca_cert_info()]):
        raise CLIError('Cannot specify both CA cert info and no verify')

    # Note: pressing ctrl + C during upload does not end the current upload in progress, but only
    # stops the next one from occurring. This will be fixed in the future.

    # Destination is either a file share ('file:...') or the native image store.
    if 'file:' in imagestore_string:
        share_path = path_from_imagestore_string(imagestore_string)

        worker = Process(
            target=upload_to_fileshare,
            args=(abspath, os.path.join(share_path, basename), show_progress))

        worker.start()
        worker.join(timeout)  # A timeout of None waits indefinitely.

        if worker.is_alive():
            # Terminating here leaves any children of the worker orphaned.
            worker.terminate()
            raise SFCTLInternalException(
                'Upload has timed out. Consider passing a longer '
                'timeout duration.')

    elif imagestore_string == 'fabric:ImageStore':

        with requests.Session() as session:
            session.verify = ca_cert
            session.cert = cert

            # Runs in-process; no separate worker process is needed here.
            upload_to_native_imagestore(session, endpoint, abspath, basename,
                                        show_progress, timeout)

    else:
        raise CLIError('Unsupported image store connection string')
예제 #2
0
def upload_single_file_native_imagestore(sesh, endpoint, basename, #pylint: disable=too-many-locals,too-many-arguments
                                         rel_path, single_file, root, target_timeout):
    """
    Upload one file of the application package to the cluster's native
    image store. Used by upload_to_native_imagestore.

    :param sesh: A requests (module) session object.
    :param endpoint: Connection url endpoint for upload requests.
    :param basename: Image store base path.
    :param rel_path: Image store relative directory path.
    :param single_file: Filename.
    :param root: Source directory path.
    :param target_timeout: Time at which timeout would be reached.
    """
    try:
        from urllib.parse import urlparse, urlencode, urlunparse
    except ImportError:
        from urllib import urlencode
        from urlparse import urlparse, urlunparse  # pylint: disable=import-error

    time_left = get_timeout_left(target_timeout)  # an int representing seconds

    if time_left == 0:
        raise SFCTLInternalException('Upload has timed out. Consider passing a longer '
                                     'timeout duration.')

    # Image-store destination path; the store always uses forward slashes.
    remote_rel = os.path.normpath(
        os.path.join('ImageStore', basename, rel_path, single_file))
    url_path = remote_rel.replace('\\', '/')

    local_path = os.path.normpath(os.path.join(root, single_file))
    with open(local_path, 'rb') as payload:
        parts = list(urlparse(endpoint))
        parts[2] = url_path
        parts[4] = urlencode({'api-version': '6.1',
                              'timeout': time_left})
        put_url = urlunparse(parts)

        # timeout is (connect_timeout, read_timeout)
        response = sesh.put(put_url, data=payload,
                            timeout=(get_lesser(60, time_left), time_left))

        response.raise_for_status()
def sfctl_cluster_version_matches(cluster_version, sfctl_version):
    """
    Check if the sfctl version and the cluster version is compatible with each other.

    :param cluster_version: str representing the cluster runtime version of the connected cluster
    :param sfctl_version: str representing this sfctl distribution version
    :return: True if they are a match. False otherwise.
    """

    if sfctl_version == '11.0.0':
        return cluster_version.startswith('7.2')

    # If we forget to update this code before a new release, the tests which call this method
    # will fail.
    raise SFCTLInternalException(
        'Invalid sfctl version {0} provided for check against cluster version {1}.'.format(
            sfctl_version, cluster_version))
예제 #4
0
def upload(
        path,
        imagestore_string='fabric:ImageStore',
        show_progress=False,
        timeout=300,  # pylint: disable=too-many-locals,missing-docstring,too-many-arguments,too-many-branches,too-many-statements
        compress=False,
        keep_compressed=False,
        compressed_location=None):
    """
    Upload an application package, optionally compressing it first, to either
    a file-share image store ('file:...') or the native image store
    ('fabric:ImageStore').

    :param path: Local path of the application package.
    :param imagestore_string: Image store connection string of the target cluster.
    :param show_progress: Print compression/upload progress when True.
    :param timeout: Overall upload timeout in seconds.
    :param compress: Compress the package before uploading.
    :param keep_compressed: Keep the generated compressed package after a
        successful upload.
    :param compressed_location: Directory in which to place the compressed
        package (defaults to 'sfctl_compressed_temp' next to the package).
    :raises CLIError: On conflicting options or an unsupported image store string.
    :raises SFCTLInternalException: If the file-share upload times out.
    """

    from sfctl.config import (client_endpoint, no_verify_setting, ca_cert_info,
                              cert_info)
    import requests

    path = _normalize_path(path)
    if compressed_location is not None:
        compressed_location = _normalize_path(compressed_location)

    abspath = validate_app_path(path)
    basename = os.path.basename(abspath)

    endpoint = client_endpoint()
    cert = cert_info()
    # SSL verification value handed to requests: False disables verification,
    # a CA bundle path pins the trust root, True uses the default store.
    ca_cert = True
    if no_verify_setting():
        ca_cert = False
    elif ca_cert_info():
        ca_cert = ca_cert_info()

    if all([no_verify_setting(), ca_cert_info()]):
        raise CLIError('Cannot specify both CA cert info and no verify')

    # The compression-related flags are meaningless without --compress.
    if not compress and (keep_compressed or compressed_location is not None):
        raise CLIError(
            '--keep-compressed and --compressed-location options are only applicable '
            'if the --compress option is set')

    compressed_pkg_location = None
    created_dir_path = None

    if compress:

        parent_folder = os.path.dirname(path)
        file_or_folder_name = os.path.basename(path)

        compressed_pkg_location = os.path.join(parent_folder,
                                               'sfctl_compressed_temp')

        if compressed_location is not None:
            compressed_pkg_location = compressed_location

        # Check if a zip file has already been created
        created_dir_path = os.path.join(compressed_pkg_location,
                                        file_or_folder_name)

        if os.path.exists(created_dir_path):
            if get_user_confirmation(
                    str.format(
                        'Deleting previously generated compressed files at '
                        '{0}. If this folder has anything else, those will be '
                        'deleted as well. Allow? ["y", "n"]: ',
                        created_dir_path)):
                shutil.rmtree(created_dir_path)
            else:
                # We can consider adding an option to number the packages in the future.
                print(
                    'Stopping upload operation. Cannot compress to the following location '
                    'because the path already exists: ' + created_dir_path)
                return

        # Let users know where to find the compressed app package before starting the
        # copy / compression, in case the process crashes in the middle, so users
        # will know where to clean up items from, or where to upload already compressed
        # app packages from
        if show_progress:
            print('Starting package compression into location: ' +
                  compressed_pkg_location)
            print()  # New line for formatting purposes
        compress_package(path, compressed_pkg_location)

        # Change the path to the path with the compressed package
        compressed_path = os.path.join(compressed_pkg_location,
                                       file_or_folder_name)

        # re-do validation and reset the variables, so the rest of the upload
        # operates on the compressed copy instead of the original package
        abspath = validate_app_path(compressed_path)
        basename = os.path.basename(abspath)

    # Note: pressing ctrl + C during upload does not end the current upload in progress, but only
    # stops the next one from occurring. This will be fixed in the future.

    # Upload to either to a folder, or native image store only
    if 'file:' in imagestore_string:
        dest_path = path_from_imagestore_string(imagestore_string)

        process = Process(target=upload_to_fileshare,
                          args=(abspath, os.path.join(dest_path, basename),
                                show_progress))

        process.start()
        process.join(timeout)  # If timeout is None then there is no timeout.

        if process.is_alive():
            process.terminate(
            )  # This will leave any children of process orphaned.
            raise SFCTLInternalException(
                'Upload has timed out. Consider passing a longer '
                'timeout duration.')

    elif imagestore_string == 'fabric:ImageStore':

        with requests.Session() as sesh:
            sesh.verify = ca_cert
            sesh.cert = cert

            # There is no need for a new process here since
            upload_to_native_imagestore(sesh, endpoint, abspath, basename,
                                        show_progress, timeout)

    else:
        raise CLIError(
            'Unsupported image store connection string. Value should be either '
            '"fabric:ImageStore", or start with "file:"')

    # If code has reached here, it means that upload was successful
    # To reach here, user must have agreed to clear this folder or exit the API
    # So we can safely delete the contents
    # User is expected to not create a folder by the same name during the upload duration
    # If needed, we can consider adding our content under a GUID in the future
    if compress and not keep_compressed:
        # Remove the generated files
        if show_progress:
            print('Removing generated folder ' + created_dir_path)
        shutil.rmtree(created_dir_path)
예제 #5
0
def upload_to_native_imagestore(
        sesh,
        endpoint,
        abspath,
        basename,  #pylint: disable=too-many-locals,too-many-arguments
        show_progress,
        timeout):
    """
    Upload the application package to cluster

    :param sesh: A requests (module) session object.
    """

    try:
        from urllib.parse import urlparse, urlencode, urlunparse
    except ImportError:
        from urllib import urlencode
        from urlparse import urlparse, urlunparse  # pylint: disable=import-error

    # Each directory contributes one extra upload: its '_.dir' marker.
    total_items = 0
    uploaded_items = 0
    for _, _, walked_files in os.walk(abspath):
        total_items += len(walked_files) + 1

    deadline = int(time()) + timeout

    def build_url(store_rel_path, seconds_left):
        # Compose the image-store PUT url for one relative path; the store
        # always uses forward slashes.
        pieces = list(urlparse(endpoint))
        pieces[2] = (os.path.normpath(
            os.path.join('ImageStore', basename,
                         store_rel_path))).replace('\\', '/')
        pieces[4] = urlencode({'api-version': '6.1',
                               'timeout': seconds_left})
        return urlunparse(pieces)

    # Note: while we are raising some exceptions regarding upload timeout, we are leaving the
    # timeouts raised by the requests library as is since it contains enough information
    for current_root, _, walked_files in os.walk(abspath):
        rel_dir = os.path.normpath(os.path.relpath(current_root, abspath))
        for file_name in walked_files:

            seconds_left = get_timeout_left(
                deadline)  # an int representing seconds

            if seconds_left == 0:
                raise SFCTLInternalException(
                    'Upload has timed out. Consider passing a longer '
                    'timeout duration.')

            source_file = os.path.normpath(os.path.join(current_root, file_name))
            with open(source_file, 'rb') as stream:
                put_url = build_url(os.path.join(rel_dir, file_name),
                                    seconds_left)

                # timeout is (connect_timeout, read_timeout)
                response = sesh.put(put_url,
                                    data=stream,
                                    timeout=(get_lesser(60, seconds_left),
                                             seconds_left))

                response.raise_for_status()
                uploaded_items += 1
                print_progress(
                    uploaded_items, total_items,
                    os.path.normpath(os.path.join(rel_dir, file_name)),
                    show_progress, get_timeout_left(deadline))

        seconds_left = get_timeout_left(deadline)

        if seconds_left == 0:
            raise SFCTLInternalException(
                'Upload has timed out. Consider passing a longer '
                'timeout duration.')

        # An empty PUT of '_.dir' marks this directory as fully uploaded.
        marker_url = build_url(os.path.join(rel_dir, '_.dir'), seconds_left)

        response = sesh.put(marker_url,
                            timeout=(get_lesser(60, seconds_left),
                                     seconds_left))
        response.raise_for_status()
        uploaded_items += 1
        print_progress(uploaded_items, total_items,
                       os.path.normpath(os.path.join(rel_dir, '_.dir')),
                       show_progress, get_timeout_left(deadline))
    if show_progress:
        print('Complete', file=sys.stderr)
예제 #6
0
def provision_application_type(
        client,  #pylint: disable=too-many-locals,invalid-name,too-many-arguments
        external_provision=False,
        no_wait=False,
        application_type_build_path=None,
        application_package_download_uri=None,
        application_type_name=None,
        application_type_version=None,
        timeout=60):
    """Provisions or registers a Service Fabric application type with the
        cluster using the .sfpkg package in the external store or using the
        application package in the image store.

    :param client: Service Fabric client used to serialize and send the request.
    :param external_provision: Provision from an external store (.sfpkg) rather
        than the image store.
    :param no_wait: Perform the provision asynchronously (maps to the
        description's async property).
    :param application_type_build_path: Image store path of the application
        package (image-store provision only).
    :param application_package_download_uri: Download URI of the .sfpkg
        (external provision only).
    :param application_type_name: Application type name (external provision only).
    :param application_type_version: Application type version (external
        provision only).
    :param timeout: Server timeout in seconds (1 to 4294967295).
    :raises CLIError: When the supplied parameters do not match the chosen
        provision kind.
    :raises SFCTLInternalException: If 'Kind' is not serialized first.
    :raises FabricErrorException: When the cluster returns a non-success status.
    """

    from azure.servicefabric.models import (
        ProvisionApplicationTypeDescription,
        ExternalStoreProvisionApplicationTypeDescription, FabricErrorException)

    provision_description = None

    # Validate inputs: each provision kind has a disjoint set of parameters.
    if external_provision:
        if application_type_build_path:
            raise CLIError(
                'application-type-build-path should not be specified for external provision.'
            )

        if not all([
                application_package_download_uri, application_type_name,
                application_type_version
        ]):
            raise CLIError(
                'Missing required parameters. The following are required: '
                '--application-package-download-uri, --application-type-name, '
                '--application-type-version.')
        provision_description = ExternalStoreProvisionApplicationTypeDescription(
            async_property=no_wait,
            application_package_download_uri=application_package_download_uri,
            application_type_name=application_type_name,
            application_type_version=application_type_version)
    else:
        if not application_type_build_path:
            raise CLIError('Missing required parameter '
                           '--application-type-build-path.')

        if any([
                application_package_download_uri, application_type_name,
                application_type_version
        ]):
            # Fixed garbled message ("The following are should not be ...").
            raise CLIError(
                'The following should not be specified for image store provision: '
                '--application-package-download-uri, --application-type-name, '
                '--application-type-version.')

        provision_description = ProvisionApplicationTypeDescription(
            async_property=no_wait,
            application_type_build_path=application_type_build_path)

    api_version = "6.2"

    # Construct URLs
    url = '/ApplicationTypes/$/Provision'

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = client._serialize.query(
        "api_version", api_version, 'str')

    query_parameters['timeout'] = client._serialize.query("timeout",
                                                          timeout,
                                                          'long',
                                                          maximum=4294967295,
                                                          minimum=1)

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'

    # Construct body with the serializer matching the chosen description type.
    body_content = None
    if not external_provision:
        body_content = client._serialize.body(
            provision_description, 'ProvisionApplicationTypeDescription')
    else:
        body_content = client._serialize.body(
            provision_description,
            'ExternalStoreProvisionApplicationTypeDescription')

    # Create a new sorted dictionary since we don't have move_to_end in python 2.
    # The service requires 'Kind' to be the first serialized member.
    body_content_sorted = OrderedDict([('Kind', body_content['Kind'])])
    for key in body_content:
        if key != 'Kind':
            body_content_sorted[key] = body_content[key]

    if list(body_content_sorted.keys())[0] != "Kind":
        raise SFCTLInternalException(
            'provision_application_type: Kind must be the first item to be serialized.'
        )

    # Construct and send request
    request = client._client.post(url, query_parameters)
    response = client._client.send(request, header_parameters,
                                   body_content_sorted)

    if response.status_code not in [200, 202]:
        raise FabricErrorException(client._deserialize, response)
예제 #7
0
def upload_to_native_imagestore(sesh, endpoint, abspath, basename, #pylint: disable=too-many-locals,too-many-arguments
                                show_progress, timeout):
    """
    Upload the application package to cluster

    :param sesh: A requests (module) session object.
    :param endpoint: Connection url endpoint for upload requests.
    :param abspath: Application source path.
    :param basename: Image store destination path.
    :param show_progress: boolean to determine whether to log upload progress.
    :param timeout: Total upload timeout in seconds.
    """

    try:
        from urllib.parse import urlparse, urlencode, urlunparse
    except ImportError:
        from urllib import urlencode
        from urlparse import urlparse, urlunparse  # pylint: disable=import-error
    total_files_count = 0
    current_files_count = 0
    for root, _, files in os.walk(abspath):
        # Number of uploads is number of files plus number of directories
        total_files_count += (len(files) + 1)

    # Absolute time at which the overall upload is considered timed out.
    target_timeout = int(time()) + timeout
    # Degree of parallelism for per-file uploads — NOTE(review): semantics
    # of get_job_count are defined elsewhere; confirm its source.
    jobcount = get_job_count()

    # Note: while we are raising some exceptions regarding upload timeout, we are leaving the
    # timeouts raised by the requests library as is since it contains enough information
    for root, _, files in os.walk(abspath):
        rel_path = os.path.normpath(os.path.relpath(root, abspath))
        filecount = len(files)

        # Files within a directory are uploaded in parallel; with progress
        # enabled, the tqdm_joblib context routes joblib updates to a tqdm bar.
        if show_progress:
            progressdescription = 'Uploading path: {}'.format(rel_path)
            with tqdm_joblib(tqdm(desc=progressdescription, total=filecount)):
                Parallel(n_jobs=jobcount)(
                    delayed(upload_single_file_native_imagestore)(
                        sesh, endpoint, basename, rel_path, single_file, root, target_timeout)
                        for single_file in files)
        else:
            Parallel(n_jobs=jobcount)(
                delayed(upload_single_file_native_imagestore)(
                    sesh, endpoint, basename, rel_path, single_file, root, target_timeout)
                    for single_file in files)

        current_time_left = get_timeout_left(target_timeout)

        if current_time_left == 0:
            raise SFCTLInternalException('Upload has timed out. Consider passing a longer '
                                         'timeout duration.')

        # An empty PUT of '_.dir' marks this directory as fully uploaded.
        url_path = (
            os.path.normpath(os.path.join('ImageStore', basename,
                                          rel_path, '_.dir'))
        ).replace('\\', '/')
        url_parsed = list(urlparse(endpoint))
        url_parsed[2] = url_path
        url_parsed[4] = urlencode({'api-version': '6.1',
                                   'timeout': current_time_left})
        url = urlunparse(url_parsed)

        # timeout is (connect_timeout, read_timeout)
        res = sesh.put(url,
                       timeout=(get_lesser(60, current_time_left), current_time_left))
        res.raise_for_status()
        current_files_count += filecount + 1
        print_progress(current_files_count, total_files_count,
                       os.path.normpath(os.path.join(rel_path, '_.dir')),
                       show_progress, get_timeout_left(target_timeout))
    if show_progress:
        print('Complete', file=sys.stderr)