def prepare_playbook_args(ctx):
    """Assemble the arguments required to run an ansible playbook.

    Reads the playbook configuration from the instance runtime
    properties, creates a fresh temporary workspace, resolves the
    playbook location (downloaded source, archive on the manager, or
    blueprint-bundled files) and builds the argument dict for the
    playbook executor.

    :param ctx: The cloudify context.
    :return: Tuple of (playbook_args dict, ansible_env_vars dict,
        options_config dict, run_data dict).
    """
    props = ctx.instance.runtime_properties
    playbook_source_path = props.get('playbook_source_path', None)
    # 'site_yaml_path' is the legacy property name for 'playbook_path'
    playbook_path = props.get('playbook_path', None) \
        or props.get('site_yaml_path', None)
    sources = props.get('sources', {})
    debug_level = props.get('debug_level', 2)
    additional_args = props.get('additional_args', '')
    additional_playbook_files = \
        props.get('additional_playbook_files', None) or []
    ansible_env_vars = props.get('ansible_env_vars', None) \
        or {'ANSIBLE_HOST_KEY_CHECKING': "False"}
    # each invocation works inside its own scratch directory
    props[WORKSPACE] = tempfile.mkdtemp()
    if playbook_source_path:
        # a full path/URL was provided; combine it with playbook_path
        playbook_tmp_path = get_shared_resource(playbook_source_path)
        if playbook_tmp_path == playbook_source_path:
            # nothing was downloaded, so inspect the provided path
            if os.path.isfile(playbook_tmp_path) and \
                    os.path.isabs(playbook_tmp_path):
                # extract first when the file is an archive
                file_type = playbook_tmp_path.rsplit(
                    '/', 1)[1].rsplit('.', 1)[1]
                if file_type == 'zip':
                    playbook_tmp_path = unzip_archive(playbook_tmp_path)
                elif file_type in TAR_FILE_EXTENSTIONS:
                    playbook_tmp_path = untar_archive(playbook_tmp_path)
        playbook_path = "{0}/{1}".format(playbook_tmp_path, playbook_path)
    else:
        # the ansible files are bundled with the blueprint
        playbook_path = handle_site_yaml(
            playbook_path, additional_playbook_files, ctx)
    container_volume = ctx.node.properties.get(
        'docker_machine', {}).get('container_volume', "")
    playbook_args = {
        'playbook_path': playbook_path,
        'sources': handle_sources(
            sources, playbook_path, ctx, container_volume),
        'verbosity': debug_level,
        'additional_args': additional_args or '',
    }
    options_config = props.get('options_config', {})
    run_data = props.get('run_data', {})
    return playbook_args, ansible_env_vars, options_config, run_data
Exemple #2
0
def _unzip_archive(archive_path, target_directory, source_path=None, **_):
    """Unzip a zip archive and copy its contents into ``target_directory``.

    :param archive_path: Path to the zip archive to extract.
    :param target_directory: Directory the extracted tree is copied into.
    :param source_path: Unused here; presumably kept for signature
        compatibility with sibling archive handlers — TODO confirm.
    :param _: Ignored extra keyword arguments.
    :return: ``target_directory``.
    """

    # ``ctx`` is the module-level cloudify operation context.
    ctx.logger.debug('Unzipping {src} to {dst}.'.format(src=archive_path,
                                                        dst=target_directory))

    # unzip_archive() extracts somewhere else (likely a temp dir — see
    # its definition); the extracted tree is then copied to the target.
    src = unzip_archive(archive_path)
    copytree(src, target_directory)
    return target_directory
Exemple #3
0
def _create_source_path(source_tmp_path):
    # didn't download anything so check the provided path
    # if file and absolute path or not
    if not os.path.isabs(source_tmp_path):
        # bundled and need to be downloaded from blueprint
        source_tmp_path = ctx.download_resource(source_tmp_path)

    if os.path.isfile(source_tmp_path):
        file_name = source_tmp_path.rsplit('/', 1)[1]
        file_type = file_name.rsplit('.', 1)[1]
        # check type
        if file_type == 'zip':
            return unzip_archive(source_tmp_path)
        elif file_type in TAR_FILE_EXTENSTIONS:
            return untar_archive(source_tmp_path)

    return source_tmp_path
Exemple #4
0
def install_docker(ctx, **kwargs):
    """Install docker on the target docker machine if it is missing.

    The installation script may be given as a URL, an absolute path on
    the manager (optionally a zip/tar archive), a blueprint-relative
    path, or inline script content. The resolved script is copied to
    /tmp on the machine and executed after fetching get-docker.sh from
    ``install_url``. Remote commands run through ``sudo``/``put``
    (presumably Fabric — they are module-level imports not shown here).

    :param ctx: The cloudify context.
    :param kwargs: Ignored extra keyword arguments.
    """
    def dump_to_file(content):
        # persist inline script content to a unique temp file and
        # return its path
        dump_file = \
            os.path.join(tempfile.mkdtemp(), str(uuid1()))
        with open(dump_file, 'w') as outfile:
            outfile.write(content)
        return dump_file

    # fetch the data needed for installation
    docker_ip, docker_user, docker_key, _ = get_docker_machine_from_ctx(ctx)
    docker_install_url = \
        ctx.node.properties.get('resource_config', {}).get('install_url', "")
    docker_install_script = \
        ctx.node.properties.get(
            'resource_config', {}).get('install_script', "")
    # the script may be a file path or the script content itself
    final_file = ""  # represents the resolved local file path
    if not docker_install_script:
        ctx.logger.error("please check the installation script")
        return
    if not os.path.isfile(docker_install_script):  # not a path / check if URL
        final_file = get_shared_resource(docker_install_script)
        # if it returns the same thing then it was not a URL,
        # so treat the value as inline content and dump it to a file
        if final_file == docker_install_script:  # here we will dump the file
            final_file = dump_to_file(docker_install_script)
    else:
        if os.path.isabs(docker_install_script):  # absolute_file_on_manager
            # NOTE(review): rsplit('.', 1)[1] raises IndexError for a
            # file name without an extension — confirm inputs always
            # carry one.
            file_name = docker_install_script.rsplit('/', 1)[1]
            file_type = file_name.rsplit('.', 1)[1]
            if file_type == 'zip':
                final_file = unzip_archive(docker_install_script)
            elif file_type in TAR_FILE_EXTENSTIONS:
                final_file = untar_archive(docker_install_script)

        else:  # could be bundled in the blueprint [relative_path]
            final_file = ctx.download_resource(docker_install_script)
    ctx.logger.info("downloaded the script to {0}".format(final_file))
    # reaching here we should have got a value for the file
    if not final_file:
        raise NonRecoverableError(
            "the installation script is not valid for some reason")
        # NOTE(review): unreachable — the raise above exits first
        return

    with get_fabric_settings(ctx, docker_ip, docker_user, docker_key) as s:
        # NOTE(review): nested "with s" re-enters the same object that
        # the outer with already produced — verify get_fabric_settings
        # actually returns a context manager to be entered twice.
        with s:
            docker_installed = False
            output = sudo('which docker')
            ctx.logger.info("output {0}".format(output))
            # docker counts as installed when `which` produced a real
            # path rather than an error message
            docker_installed = output is not None \
                and 'no docker' not in output \
                and '/docker' in output
            ctx.logger.info(
                "Is Docker installed ? : {0}".format(docker_installed))
            if not docker_installed:  # docker is not installed
                ctx.logger.info("Installing docker from the provided link")
                put(final_file, "/tmp")
                # rewrite the local path to its remote /tmp location
                final_file = final_file.replace(os.path.dirname(final_file),
                                                "/tmp")
                sudo("chmod a+x {0}".format(final_file))
                output = \
                    sudo('curl -fsSL -o get-docker.sh {0}; '
                         'sh get-docker.sh && {1}'.format(
                            docker_install_url, "{0}".format(final_file)))
                ctx.logger.info("Installation output : {0}".format(output))
            else:
                # docker is installed,
                # we need to check if the api port is enabled
                output = sudo('docker -H tcp://0.0.0.0:2375 ps')
                if 'Is the docker daemon running?' not in output:
                    ctx.logger.info("your docker installation is good to go")
                    return
                else:
                    ctx.logger.info(
                        "your docker installation need to enable API access")
                    return
Exemple #5
0
def prepare_container_files(ctx, **kwargs):
    """Stage the files a container needs into a destination directory.

    Downloads and extracts ``source``, copies ``extra_files`` next to
    it, then handles two special cases: ``ansible_sources`` (generates
    a hosts file and the ansible-playbook command line) and
    ``terraform_sources`` (lays out storage/plugins directories,
    variables, backend config and a driver shell script). Finally the
    destination directory is copied to the docker machine when it is
    not the local host.

    :param ctx: The cloudify context.
    :param kwargs: Ignored extra keyword arguments.
    """

    docker_ip, docker_user, docker_key, _ = get_docker_machine_from_ctx(ctx)
    source = \
        ctx.node.properties.get('resource_config', {}).get('source', "")
    destination = \
        ctx.node.properties.get('resource_config', {}).get('destination', "")
    extra_files = \
        ctx.node.properties.get('resource_config', {}).get('extra_files', {})
    ansible_sources = \
        ctx.node.properties.get(
            'resource_config', {}).get('ansible_sources', {})
    terraform_sources = \
        ctx.node.properties.get(
            'resource_config', {}).get('terraform_sources', {})
    # check source to handle various cases [zip,tar,git]
    source_tmp_path = get_shared_resource(source)
    # check if we actually downloaded something or not
    if source_tmp_path == source:
        # didn't download anything so check the provided path
        # if file and absolute path or not
        if not os.path.isabs(source_tmp_path):
            # bundled and needs to be downloaded from the blueprint
            source_tmp_path = ctx.download_resource(source_tmp_path)
        if os.path.isfile(source_tmp_path):
            # NOTE(review): rsplit('.', 1)[1] raises IndexError for a
            # file name without an extension — confirm inputs always
            # carry one.
            file_name = source_tmp_path.rsplit('/', 1)[1]
            file_type = file_name.rsplit('.', 1)[1]
            # check type
            if file_type == 'zip':
                source_tmp_path = unzip_archive(source_tmp_path)
            elif file_type in TAR_FILE_EXTENSTIONS:
                source_tmp_path = untar_archive(source_tmp_path)

    # Reaching this point we should have got the files into source_tmp_path
    if not destination:
        destination = tempfile.mkdtemp()
    move_files(source_tmp_path, destination)
    # the extraction/download location is no longer needed
    shutil.rmtree(source_tmp_path)

    # copy extra files to destination
    for file in extra_files:
        try:
            is_file_path = os.path.exists(file)
            if is_file_path:
                shutil.copy(file, destination)
        except TypeError:
            # os.path.exists raises TypeError for non-path values
            raise NonRecoverableError("file {0} can't be copied".format(file))

    # handle ansible_sources -Special Case-:
    if ansible_sources:
        hosts_file = os.path.join(destination, HOSTS_FILE_NAME)
        # handle the private key logic: either a path to an existing
        # key file or the raw key material to be written to disk
        private_key_val = ansible_sources.get(ANSIBLE_PRIVATE_KEY, "")
        if private_key_val:
            try:
                is_file_path = os.path.exists(private_key_val)
            except TypeError:
                is_file_path = False
            if not is_file_path:
                private_key_file = os.path.join(destination, str(uuid1()))
                with open(private_key_file, 'w') as outfile:
                    outfile.write(private_key_val)
                # ssh refuses keys that are readable by others
                os.chmod(private_key_file, 0o600)
                ansible_sources.update({ANSIBLE_PRIVATE_KEY: private_key_file})
        else:
            raise NonRecoverableError(
                "Check Ansible Sources, No private key was provided")
        # check if playbook_path was provided or not
        playbook_path = ansible_sources.get(PLAYBOOK_PATH, "")
        if not playbook_path:
            raise NonRecoverableError(
                "Check Ansible Sources, No playbook path was provided")
        # NOTE(review): the literal "hosts" key here is presumably the
        # same value as the HOSTS constant used below — confirm.
        hosts_dict = {"all": {"hosts": {"instance": {}}}}
        for key in ansible_sources:
            if key in (CONTAINER_VOLUME, PLAYBOOK_PATH):
                continue
            elif key == ANSIBLE_PRIVATE_KEY:
                # replace docker mapping to container volume
                hosts_dict['all'][HOSTS]['instance'][key] = \
                    ansible_sources.get(key).replace(destination,
                                                     ansible_sources.get(
                                                        CONTAINER_VOLUME))
            else:
                hosts_dict['all'][HOSTS]['instance'][key] = \
                    ansible_sources.get(key)
        with open(hosts_file, 'w') as outfile:
            yaml.safe_dump(hosts_dict, outfile, default_flow_style=False)
        # command line the container entrypoint will execute
        ctx.instance.runtime_properties['ansible_container_command_arg'] = \
            "ansible-playbook -i hosts {0}".format(playbook_path)

    # handle terraform_sources -Special Case-:
    if terraform_sources:
        container_volume = terraform_sources.get(CONTAINER_VOLUME, "")
        # handle files
        storage_dir = terraform_sources.get("storage_dir", "")
        if not storage_dir:
            storage_dir = os.path.join(destination, str(uuid1()))
        else:
            storage_dir = os.path.join(destination, storage_dir)
        os.mkdir(storage_dir)
        # move the downloaded files from source to storage_dir
        move_files(destination, storage_dir)
        # store the runtime property relative to container rather than docker
        storage_dir_prop = storage_dir.replace(destination, container_volume)
        ctx.instance.runtime_properties['storage_dir'] = storage_dir_prop

        # handle plugins
        plugins_dir = terraform_sources.get("plugins_dir", "")
        if not plugins_dir:
            plugins_dir = os.path.join(destination, str(uuid1()))
        else:
            plugins_dir = os.path.join(destination, plugins_dir)
        plugins = terraform_sources.get("plugins", {})
        os.mkdir(plugins_dir)
        for plugin in plugins:
            downloaded_plugin_path = get_shared_resource(plugin)
            if downloaded_plugin_path == plugin:
                # it means we didn't download anything / extract
                raise NonRecoverableError(
                    "Check Plugin {0} URL".format(plugin))
            else:
                move_files(downloaded_plugin_path, plugins_dir, 0o775)
        os.chmod(plugins_dir, 0o775)
        # store the runtime property relative to container rather than docker
        plugins_dir = plugins_dir.replace(destination, container_volume)
        ctx.instance.runtime_properties['plugins_dir'] = plugins_dir

        # handle variables
        terraform_variables = terraform_sources.get("variables", {})
        if terraform_variables:
            variables_file = os.path.join(storage_dir, 'vars.json')
            with open(variables_file, 'w') as outfile:
                json.dump(terraform_variables, outfile)
            # store the runtime property relative to container
            # rather than docker
            variables_file = \
                variables_file.replace(destination, container_volume)
            ctx.instance.runtime_properties['variables_file'] = variables_file

        # handle backend
        backend_file = ""
        terraform_backend = terraform_sources.get("backend", {})
        if terraform_backend:
            if not terraform_backend.get("name", ""):
                raise NonRecoverableError(
                    "Check backend {0} name value".format(terraform_backend))
            backend_str = """
                terraform {
                    backend "{backend_name}" {
                        {backend_options}
                    }
                }
            """
            backend_options = ""
            for option_name, option_value in \
                    terraform_backend.get("options", {}).items():
                # NOTE(review): basestring is Python 2 only — raises
                # NameError on Python 3; confirm the target runtime.
                # Also no separator is appended between options, so
                # multiple options run together on one line.
                if isinstance(option_value, basestring):
                    backend_options += "{0} = \"{1}\"".format(
                        option_name, option_value)
                else:
                    backend_options += "{0} = {1}".format(
                        option_name, option_value)
            # NOTE(review): the .format() result is discarded, so the
            # unformatted template is written below; the literal { }
            # braces in the template would also make .format() raise —
            # this likely needs str.replace or escaped braces.
            backend_str.format(backend_name=terraform_backend.get("name"),
                               backend_options=backend_options)
            backend_file = os.path.join(
                storage_dir, '{0}.tf'.format(terraform_backend.get("name")))
            with open(backend_file, 'w') as outfile:
                outfile.write(backend_str)
            # store the runtime property relative to container
            # rather than docker
            backend_file = \
                backend_file.replace(destination, container_volume)
            ctx.instance.runtime_properties['backend_file'] = backend_file

        # handle terraform scripts inside shell script
        terraform_script_file = os.path.join(storage_dir,
                                             '{0}.sh'.format(str(uuid1())))
        terraform_script = """#!/bin/bash -e
terraform init -no-color {backend_file} -plugin-dir={plugins_dir} {storage_dir}
terraform plan -no-color {vars_file} {storage_dir}
terraform apply -no-color -auto-approve {vars_file} {storage_dir}
terraform refresh -no-color {vars_file}
terraform state pull
        """.format(backend_file="" if not backend_file else
                   "-backend-config={0}".format(backend_file),
                   plugins_dir=plugins_dir,
                   storage_dir=storage_dir_prop,
                   vars_file="" if not terraform_variables else
                   " -var-file {0}".format(variables_file))
        ctx.logger.info(
            "terraform_script_file content {0}".format(terraform_script))
        with open(terraform_script_file, 'w') as outfile:
            outfile.write(terraform_script)
        # store the runtime property relative to container
        # rather than docker machine path
        terraform_script_file = \
            terraform_script_file.replace(destination, container_volume)
        ctx.instance.runtime_properties['terraform_script_file'] = \
            terraform_script_file
        ctx.instance.runtime_properties['terraform_container_command_arg'] = \
            "bash {0}".format(terraform_script_file)

    # Reaching this point means we now have everything in this destination
    ctx.instance.runtime_properties['destination'] = destination
    ctx.instance.runtime_properties['docker_host'] = docker_ip
    # copy these files to docker machine if needed at that destination
    # NOTE(review): membership is tested against the 2-tuple
    # (LOCAL_HOST_ADDRESSES, get_lan_ip()) — i.e. docker_ip is compared
    # to the whole LOCAL_HOST_ADDRESSES object, not its elements;
    # prepare_terraform_files uses the different (likely intended)
    # form `not in LOCAL_HOST_ADDRESSES and not == get_lan_ip()`.
    if docker_ip not in (LOCAL_HOST_ADDRESSES, get_lan_ip()):
        with get_fabric_settings(ctx, docker_ip, docker_user, docker_key) as s:
            with s:
                destination_parent = destination.rsplit('/', 1)[0]
                if destination_parent != '/tmp':
                    sudo('mkdir -p {0}'.format(destination_parent))
                    sudo("chown -R {0}:{0} {1}".format(docker_user,
                                                       destination_parent))
                put(destination, destination_parent, mirror_local_mode=True)
    # NOTE(review): this wrapper references ``func`` and
    # ``ctx_from_import`` from an enclosing decorator scope that is not
    # visible here; it appears misplaced inside prepare_container_files
    # and should live inside its decorator factory.
    def wrapper(playbook_path=None,
                sources=None,
                ctx=ctx_from_import,
                ansible_env_vars=None,
                debug_level=2,
                additional_args=None,
                additional_playbook_files=None,
                site_yaml_path=None,
                save_playbook=False,
                remerge_sources=False,
                playbook_source_path=None,
                **kwargs):
        """Prepare the arguments to send to AnsiblePlaybookFromFile.

        :param playbook_path: A path to your `site.yaml` or `main.yaml`
            in your Ansible Playbook.
        :param sources: Either a path (with the site.yaml).
            Or a YAML dictionary (from the blueprint itself).
        :param ctx: The cloudify context.
        :param ansible_env_vars:
          A dictionary of environment variables to set.
        :param debug_level: Debug level.
        :param additional_args: Additional args that you want to use, for
          example, '-c local'.
        :param additional_playbook_files: Additional files to download
          from the blueprint alongside the playbook.
        :param site_yaml_path: Deprecated alias for ``playbook_path``.
        :param save_playbook: Don't remove playbook after action.
        :param remerge_sources: Update sources on target node.
        :param playbook_source_path: Full path/URL of a playbook
          source (possibly an archive) to combine with
          ``playbook_path``.
        :param kwargs: Extra parameters forwarded to the playbook args.
        :return:
        """
        playbook_path = playbook_path or site_yaml_path
        additional_playbook_files = additional_playbook_files or []
        ansible_env_vars = \
            ansible_env_vars or {'ANSIBLE_HOST_KEY_CHECKING': "False"}
        if not sources:
            if remerge_sources:
                # add sources from source node to target node
                sources = get_remerged_config_sources(ctx, kwargs)
            else:
                sources = get_source_config_from_ctx(ctx)

        # store sources in node runtime_properties
        _get_instance(ctx).runtime_properties['sources'] = sources
        _get_instance(ctx).update()

        try:
            create_playbook_workspace(ctx)
            # check if source path is provided [full path/URL]
            if playbook_source_path:
                # here we will combine playbook_source_path with playbook_path
                playbook_tmp_path = get_shared_resource(playbook_source_path)
                if playbook_tmp_path == playbook_source_path:
                    # didn't download anything so check the provided path
                    # if file and absolute path
                    if os.path.isfile(playbook_tmp_path) and \
                            os.path.isabs(playbook_tmp_path):
                        # check file type if archived
                        file_name = playbook_tmp_path.rsplit('/', 1)[1]
                        file_type = file_name.rsplit('.', 1)[1]
                        if file_type == 'zip':
                            playbook_tmp_path = \
                                unzip_archive(playbook_tmp_path)
                        elif file_type in TAR_FILE_EXTENSTIONS:
                            playbook_tmp_path = \
                                untar_archive(playbook_tmp_path)
                playbook_path = "{0}/{1}".format(playbook_tmp_path,
                                                 playbook_path)
            else:
                # here will handle the bundled ansible files
                playbook_path = handle_site_yaml(
                    playbook_path, additional_playbook_files, ctx)
            playbook_args = {
                'playbook_path': playbook_path,
                'sources': handle_sources(sources, playbook_path, ctx),
                'verbosity': debug_level,
                'additional_args': additional_args or '',
                'logger': ctx.logger
            }
            # copy additional params from kwargs
            for field in DIRECT_PARAMS:
                if kwargs.get(field):
                    playbook_args[field] = kwargs[field]

            playbook_args.update(**kwargs)
            # ``func`` is the wrapped operation supplied by the decorator
            func(playbook_args, ansible_env_vars, ctx)
        finally:
            # always clean up unless the caller asked to keep the files
            if not save_playbook:
                delete_playbook_workspace(ctx)
def prepare_terraform_files(ctx, **kwargs):
    """Stage terraform files and a driver script for a container run.

    Downloads and extracts ``source`` into a storage directory, writes
    optional backend config and a vars.json, downloads any terraform
    plugins, generates a shell script that runs the
    init/plan/apply/refresh cycle, and finally copies everything to the
    docker machine when it is not the local host. Paths stored in
    runtime properties are rewritten relative to the container volume.

    :param ctx: The cloudify context.
    :param kwargs: Ignored extra keyword arguments.
    """

    docker_ip, docker_user, docker_key, container_volume = \
        get_docker_machine_from_ctx(ctx)

    source = \
        ctx.node.properties.get('resource_config', {}).get('source', "")
    backend = \
        ctx.node.properties.get('resource_config', {}).get('backend', {})
    variables = \
        ctx.node.properties.get('resource_config', {}).get('variables', {})
    environment_variables = \
        ctx.node.properties.get('resource_config', {}).get(
            'environment_variables', {})

    terraform_plugins = ctx.node.properties.get('terraform_plugins', [])

    if not source:
        raise NonRecoverableError("Please check the source value")
        # NOTE(review): unreachable — the raise above exits first
        return

    destination = tempfile.mkdtemp()

    # handle the provided source
    source_tmp_path = get_shared_resource(source)
    if source_tmp_path == source:
        # didn't download anything so check the provided path
        # if file and relative path to download from blueprint
        if os.path.isfile(source_tmp_path) and \
                not os.path.isabs(source_tmp_path):
            source_tmp_path = ctx.download_resource(source)
        # check file type if archived
        # NOTE(review): rsplit('/', 1)[1] / rsplit('.', 1)[1] raise
        # IndexError when the path has no '/' or the file no extension
        # — confirm inputs always carry both.
        file_name = source_tmp_path.rsplit('/', 1)[1]
        file_type = file_name.rsplit('.', 1)[1]
        if file_type == 'zip':
            source_tmp_path = \
                unzip_archive(source_tmp_path)
        elif file_type in TAR_FILE_EXTENSTIONS:
            source_tmp_path = \
                untar_archive(source_tmp_path)

    storage_dir = "{0}/{1}".format(destination, "storage")
    os.mkdir(storage_dir)

    move_files(source_tmp_path, storage_dir)
    # the extraction/download location is no longer needed
    shutil.rmtree(source_tmp_path)

    # store the runtime property relative to container rather than docker
    storage_dir_prop = storage_dir.replace(destination, container_volume)
    ctx.instance.runtime_properties['storage_dir'] = storage_dir_prop

    plugins_dir = "{0}/{1}".format(destination, "plugins")
    os.mkdir(plugins_dir)

    backend_file = ""
    if backend:
        if not backend.get("name", ""):
            raise NonRecoverableError(
                "Check backend {0} name value".format(backend))
        backend_str = """
            terraform {
                backend "{backend_name}" {
                    {backend_options}
                }
            }
        """
        backend_options = ""
        for option_name, option_value in \
                backend.get("options", {}).items():
            # NOTE(review): basestring is Python 2 only — raises
            # NameError on Python 3; confirm the target runtime. Also
            # no separator is appended between options, so multiple
            # options run together on one line.
            if isinstance(option_value, basestring):
                backend_options += "{0} = \"{1}\"".format(
                    option_name, option_value)
            else:
                backend_options += "{0} = {1}".format(option_name,
                                                      option_value)
        # NOTE(review): the .format() result is discarded, so the
        # unformatted template is written below; the literal { } braces
        # in the template would also make .format() raise — this likely
        # needs str.replace or escaped braces.
        backend_str.format(backend_name=backend.get("name"),
                           backend_options=backend_options)
        backend_file = os.path.join(storage_dir,
                                    '{0}.tf'.format(backend.get("name")))
        with open(backend_file, 'w') as outfile:
            outfile.write(backend_str)
        # store the runtime property relative to container
        # rather than docker machine path
        backend_file = \
            backend_file.replace(destination, container_volume)
        ctx.instance.runtime_properties['backend_file'] = backend_file

    variables_file = ""
    if variables:
        variables_file = os.path.join(storage_dir, 'vars.json')
        with open(variables_file, 'w') as outfile:
            json.dump(variables, outfile)
        # store the runtime property relative to container
        # rather than docker machine path
        variables_file = \
            variables_file.replace(destination, container_volume)
        ctx.instance.runtime_properties['variables_file'] = variables_file
    ctx.instance.runtime_properties['environment_variables'] = \
        environment_variables
    if terraform_plugins:
        for plugin in terraform_plugins:
            downloaded_plugin_path = get_shared_resource(plugin)
            if downloaded_plugin_path == plugin:
                # it means we didn't download anything / extract
                raise NonRecoverableError(
                    "Check Plugin {0} URL".format(plugin))
            else:
                move_files(downloaded_plugin_path, plugins_dir, 0o775)
        os.chmod(plugins_dir, 0o775)
    plugins_dir = plugins_dir.replace(destination, container_volume)
    ctx.instance.runtime_properties['plugins_dir'] = plugins_dir

    # handle terraform scripts inside shell script
    terraform_script_file = os.path.join(storage_dir,
                                         '{0}.sh'.format(str(uuid1())))
    terraform_script = """#!/bin/bash -e
terraform init -no-color {backend_file} -plugin-dir={plugins_dir} {storage_dir}
terraform plan -no-color {vars_file} {storage_dir}
terraform apply -no-color -auto-approve {vars_file} {storage_dir}
terraform refresh -no-color {vars_file}
terraform state pull
    """.format(backend_file="" if not backend_file else
               "-backend-config={0}".format(backend_file),
               plugins_dir=plugins_dir,
               storage_dir=storage_dir_prop,
               vars_file=""
               if not variables else " -var-file {0}".format(variables_file))
    ctx.logger.info(
        "terraform_script_file content {0}".format(terraform_script))
    with open(terraform_script_file, 'w') as outfile:
        outfile.write(terraform_script)
    # store the runtime property relative to container
    # rather than docker machine path
    terraform_script_file = \
        terraform_script_file.replace(destination, container_volume)
    ctx.instance.runtime_properties['terraform_script_file'] = \
        terraform_script_file
    ctx.instance.runtime_properties['terraform_container_command_arg'] = \
        "bash {0}".format(terraform_script_file)

    # Reaching this point means we now have everything in this destination
    ctx.instance.runtime_properties['destination'] = destination
    ctx.instance.runtime_properties['docker_host'] = docker_ip
    # copy these files to docker machine if needed at that destination
    if docker_ip not in LOCAL_HOST_ADDRESSES and not docker_ip == get_lan_ip():
        with get_fabric_settings(ctx, docker_ip, docker_user, docker_key) as s:
            with s:
                destination_parent = destination.rsplit('/', 1)[0]
                if destination_parent != '/tmp':
                    sudo('mkdir -p {0}'.format(destination_parent))
                    sudo("chown -R {0}:{0} {1}".format(docker_user,
                                                       destination_parent))
                put(destination, destination_parent, mirror_local_mode=True)