Example #1
    def __init__(self,
                 ctx,
                 fetch_type,
                 src,
                 dest,
                 cache_dir,
                 root='',
                 root_ws='',
                 ref=None):
        super(FetchHttp, self).__init__(ctx,
                                        fetch_type,
                                        dest,
                                        root=root,
                                        root_ws=root_ws,
                                        ref=ref)

        self.cache_dir = os.path.join(cache_dir, "http")
        if not os.path.exists(self.cache_dir):
            os.makedirs(self.cache_dir)

        try:
            r = requests.get(src)
            r.raise_for_status()
        except requests.exceptions.HTTPError:
            raise LinchpinError("An HTTP error occurred")
        except requests.exceptions.RequestException:
            raise LinchpinError("Could not connect to given URL")

        if r.status_code != 200:
            raise LinchpinError("The entered url is invalid")

        self.src = src.rstrip('/')
Example #2
    def do_validation(self, provision_data, old_schema=False):
        """
        This function takes provision_data, and attempts to validate the
        topologies for that data

        :param provision_data: PinFile data as a dictionary, with
        target information
        """

        results = {}

        return_code = 0

        for target in provision_data.keys():
            if not isinstance(provision_data[target], dict):
                raise LinchpinError("Target '{0}' does not"
                                    " exist.".format(target))

        targets = [x.lower() for x in provision_data.keys()]
        if 'linchpin' in targets:
            raise LinchpinError("Target 'linchpin' is not allowed.")

        for target in provision_data.keys():
            self.ctx.log_debug("Processing target: {0}".format(target))

            results[target] = {}
            self.set_evar('target', target)

            validator = Validator(self.ctx, self.pb_path, self.pb_ext)
            target_dict = provision_data[target]
            ret, results[target] = validator.validate_pretty(
                target_dict, target, old_schema)
            return_code += ret

        return return_code, results
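
For orientation, here is a minimal sketch of the dictionary shape do_validation expects; the target name and topology fields below are invented, and only the PinFile-as-dict structure (one dict per target, no target named 'linchpin') follows from the code above.

# Hypothetical PinFile data: each top-level key is a target whose value
# must itself be a dict (and may not be named 'linchpin').
provision_data = {
    'dummy-cluster': {
        'topology': {
            'topology_name': 'dummy_cluster',
            'resource_groups': [],
        },
    },
}

# With `api` being an instance of the class above, validation would be
# driven roughly like this (sketch only):
# return_code, results = api.do_validation(provision_data)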
Example #3
    def _invoke_playbooks(self, resources={}, action='up', console=True,
                          providers=[]):
        """
        Uses the Ansible API code to invoke the specified linchpin playbook

        :param resources: dict of resources to provision
        :param action: Which ansible action to run (default: 'up')
        :param console: Whether to display the ansible console (default: True)
        """

        return_code = 0
        results = []

        self.set_evar('_action', action)
        self.set_evar('state', 'present')

        if action == 'setup' or action == 'ask_sudo_setup':
            self.set_evar('setup_providers', providers)
            return_code, res = self._find_n_run_pb(action,
                                                   "localhost",
                                                   console=console)
            if res:
                results.append(res)
            if not len(results):
                results = None
            return (return_code, results)

        if action == 'destroy':
            self.set_evar('state', 'absent')

        inventory_src = '{0}/localhost'.format(self.workspace)

        for resource in resources:
            self.set_evar('resources', resource)
            playbook = resource.get('resource_group_type')
            return_code, res = self._find_n_run_pb(playbook,
                                                   inventory_src,
                                                   console=console)
            if action == "up" and (return_code > 0 and
                                   not isinstance(return_code, str)):
                if self.ctx.verbosity > 0:
                    raise LinchpinError("Unsuccessful provision of resource")
                else:
                    res_grp_name = resource['resource_group_name']
                    msg = res['failed'][0]._result['msg']
                    task = res['failed'][0].task_name
                    raise LinchpinError("Unable to provision resource group "
                                        "'{0}' due to '{1}' at task "
                                        " '{2}'".format(res_grp_name,
                                                        msg, task))

            if res:
                results.append(res)

        if not len(results):
            results = None

        return (return_code, results)
Example #4
    def __init__(self, ctx, fetch_type, src, dest, cache_dir, root):
        super(FetchLocal, self).__init__(ctx, fetch_type, dest, root)

        self.cache_dir = os.path.join(cache_dir, "local")
        if not os.path.exists(self.cache_dir):
            os.mkdir(self.cache_dir)

        src_parse = urlparse.urlparse(src)
        self.src = os.path.abspath(
            os.path.join(src_parse.netloc, src_parse.path))

        if not os.path.exists(self.src):
            raise LinchpinError('{0} is not a valid path'.format(src))
        if os.path.samefile(self.src, self.dest):
            raise LinchpinError("Provide two different locations")
Example #5
    def call_wget(self, fetch_dir=None):

        src_w_root = '{0}/{1}'.format(self.src, self.root)
        tempdir = None

        # globs to reject
        rej = '*html*'

        if not fetch_dir:
            tempdir = tempfile.mkdtemp(prefix="http_", dir=self.cache_dir)
            fetch_dir = tempdir

        wget_args = ['wget', '-r', '-np', '-nH', '-q', '--reject',
                     rej, src_w_root, '-P', fetch_dir]

        retval = subprocess.call(wget_args)

        if retval != 0:
            # only remove the directory if it was created here
            if tempdir:
                try:
                    os.rmdir(tempdir)
                except OSError:
                    pass
            raise LinchpinError('Unable to fetch files with the following'
                                ' command:\n{0}'.format(" ".join(wget_args)))
        return fetch_dir
Example #6
    def call_wget(self, src, fetch_dir=None):
        list_args = src.split('/')
        list_args = list_args[3:]
        tempdir = None

        if fetch_dir is None:
            tempdir = tempfile.mkdtemp(prefix="http_", dir=self.cache_dir)
            wget_args = ['wget', '-r', '-np', '-nH', '-q', '--reject', 'html',
                         '--cut-dirs={0}'.format(len(list_args)),
                         src, '-P', tempdir]
        else:
            tempdir = fetch_dir
            wget_args = ['wget', '-r', '-np', '-N', '-nH', '-q', '--reject',
                         'html', '--cut-dirs={0}'.format(len(list_args)), src,
                         '-P', tempdir]

        retval = subprocess.call(wget_args)

        if retval != 0:
            try:
                os.rmdir(tempdir)
            except OSError:
                pass
            raise LinchpinError('Unable to fetch files with the following'
                                ' command:\n{0}'.format(" ".join(wget_args)))
        return tempdir
Example #7
    def find_include(self, filename, ftype='topology'):
        """
        Find the included file to be acted upon.

        :param filename:
            name of the file to be loaded

        :param ftype:
            the file type to locate: topology, layout, or hooks
            (default: topology)

        """
        folder = self.get_evar('topologies_folder', 'topologies')
        if ftype == 'layout':
            folder = self.get_evar('layouts_folder', 'layouts')
        elif ftype == 'hooks':
            folder = self.get_evar('hooks_folder', 'hooks')

        path = os.path.realpath('{0}/{1}'.format(self.workspace, folder))
        files = os.listdir(path)

        if filename in files:
            return os.path.realpath('{0}/{1}'.format(path, filename))

        raise LinchpinError('{0} not found in workspace'.format(filename))
Example #8
    def call_clone(self, fetch_dir=None):

        ref = None
        src = self.src
        if self.ref:
            ref = self.ref
            src = '{0}@{1}'.format(self.src, ref)

        if fetch_dir and os.path.exists(fetch_dir):
            cmd = ['git', 'pull', '--quiet']
            retval = subprocess.call(cmd,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE,
                                     cwd=fetch_dir)
        else:
            if not fetch_dir:
                fetch_dir = tempfile.mkdtemp(prefix="git_", dir=self.cache_dir)

            cmd = ['git', 'clone', '--quiet', self.src]
            if ref:
                cmd.extend(['-b', ref])

            cmd.append(fetch_dir)

            retval = subprocess.call(cmd,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)

        if retval != 0:
            raise LinchpinError("Unable to clone {0}".format(src))
        return fetch_dir
Example #9
    def copy_dir(self, src, dest):
        for root, dirs, files in os.walk(src):
            files = [f for f in files if not f[0] == '.']
            dirs[:] = [d for d in dirs if not d[0] == '.']
            if not os.path.isdir(root):
                os.makedirs(root)
            for file in files:
                rel_path = root.replace(src, '').lstrip(os.sep)
                dest_path = os.path.join(dest, rel_path)

                if not os.path.isdir(dest_path):
                    try:
                        os.makedirs(dest_path)
                    except OSError as e:
                        if e.errno == 17:
                            raise LinchpinError(
                                'File {0} already exists'
                                ' in destination directory'.format(e.filename))

                s = os.path.join(root, file)
                d = os.path.join(dest_path, file)

                # copy only if the destination is missing or the source file
                # is newer than the existing destination file
                if (not os.path.exists(d)) or (
                        os.stat(s).st_mtime - os.stat(d).st_mtime > 1):
                    try:
                        if os.path.islink(s) and os.path.exists(
                                os.readlink(s)):
                            linkto = os.readlink(s)
                            os.symlink(linkto, d)
                        else:
                            shutil.copy2(s, d)
                    except (IOError, OSError):
                        pass
Example #10
def get_driver(provider):

    if provider not in filter_classes:
        raise LinchpinError("Key {0} not found in"
                            " inventory provider dict".format(provider))

    return filter_classes[provider]
Example #11
    def validate_topology_highlevel(self, topo_data):
        """
        validate the higher-level components of the topology

        These are not specific to the provider and must be validated separately
        from the items within each resource group

        :param topo_data: topology data from the PinFile
        """

        role_path = self._find_role_path("common")
        try:
            sp = "{0}/files/topo-schema.json".format(role_path)
            schema = json.load(open(sp))
        except Exception as e:
            raise LinchpinError("Error with schema: '{0}'"
                                " {1}".format(sp, e))

        document = {'topology': topo_data}
        v = AnyofValidator(schema, error_handler=ValidationErrorHandler)

        if not v.validate(document):
            try:
                err = self._gen_error_msg("", "", v.errors)
                raise TopologyError(err)
            except NotImplementedError:
                # we shouldn't have this issue using cerberus >= 1.2, but
                # this is here just in case an older version has to be used
                self.ctx.log_state("There was an error validating your"
                                   " schema, but we can't seem to format"
                                   " it for you")
                self.ctx.log_state("Here's the raw error data in case you"
                                   " want to go through it by hand:")
                self.ctx.log_state(v._errors)
Example #12
    def _parse_config(self, path):
        """
        Parse configs into the self.cfgs dict from provided path.

        :param path: A path to a config to parse
        """

        try:
            config = ConfigParser.ConfigParser()
            f = open(path)
            config.read_file(f)
            f.close()

            for section in config.sections():
                if not self.cfgs.get(section):
                    self.cfgs[section] = {}

                for k in config.options(section):
                    if section == 'evars':
                        try:
                            self.cfgs[section][k] = (config.getboolean(
                                section, k))
                        except ValueError:
                            self.cfgs[section][k] = config.get(section, k)
                    else:
                        try:
                            self.cfgs[section][k] = config.get(section, k)
                        except ConfigParser.InterpolationMissingOptionError:
                            value = config.get(section, k, raw=True)
                            self.cfgs[section][k] = value.replace('%%', '%')

        except ConfigParser.InterpolationSyntaxError as e:
            raise LinchpinError('Unable to parse configuration file properly:'
                                ' {0}'.format(e))
Example #13
    def _get_pinfile_path(self, exists=True):
        """
        This function finds self.pinfile. If the file is a full path,
        it is expanded and used. If it is not set, the lp.default_pinfile
        configuration value is used as the pinfile, the workspace is
        prepended, and the resulting path is returned.

        :param exists:
            Whether the pinfile is supposed to already exist (default: True)
        """

        if not self.pinfile:
            self.pinfile = self.get_cfg('lp',
                                        'default_pinfile',
                                        default='PinFile')

        pf_w_path = os.path.realpath(os.path.expanduser(self.pinfile))
        if self.workspace:
            pf_w_path = '{0}/{1}'.format(self.workspace, self.pinfile)

        # Ensure a PinFile path will exist
        if not os.path.exists(pf_w_path) and exists:
            pf_w_path = '{0}/{1}'.format(self.workspace, self.pinfile)

        # If the PinFile doesn't exist, raise an error
        if not os.path.exists(pf_w_path) and exists:
            raise LinchpinError('{0} not found. Please check that it'
                                ' exists and try again'.format(pf_w_path))

        return pf_w_path
Example #14
    def _find_playbook_path(self, playbook):
        p = '{0}/{1}'.format(self.pb_path, playbook)

        if os.path.exists(os.path.expanduser(p)):
            return self.pb_path

        raise LinchpinError("playbook '{0}' not found in"
                            " path: {1}".format(playbook, self.pb_path))
Example #15
    def write_config_file(self, path):

        try:
            with open(path, 'a+') as f:
                self.parser.write(f)
        except Exception as e:
            raise LinchpinError('Unable to write configuration file:'
                                ' {0}'.format(e))
Example #16
    def lp_fetch(self, src, root=None, fetch_type='workspace'):
        if root is not None:
            root = list(filter(None, root.split(',')))

        dest = self.workspace
        if not os.path.exists(dest):
            raise LinchpinError(dest + " does not exist")

        fetch_aliases = {
            "topologies": self.get_evar("topologies_folder"),
            "layouts": self.get_evar("layouts_folder"),
            "resources": self.get_evar("resources_folder"),
            "hooks": self.get_evar("hooks_folder"),
            "workspace": "workspace"
        }

        fetch_dir = fetch_aliases.get(fetch_type, "workspace")


        cache_path = os.path.abspath(os.path.join(os.path.expanduser('~'),
                                                  '.cache/linchpin'))
        if not os.path.exists(cache_path):
            os.makedirs(cache_path)

        protocol_regex = OrderedDict([
            (r'((git|ssh|http(s)?)|(git@[\w\.]+))'
                r'(:(//)?)([\w\.@\:/\-~]+)(\.git)(/)?',
                'FetchGit'),
            ('^(http|https)://', 'FetchHttp'),
            ('^(file)://', 'FetchLocal')
        ])
        fetch_protocol = None
        for regex, obj in protocol_regex.items():
            if re.match(regex, src):
                fetch_protocol = obj
                break
        if fetch_protocol is None:
            raise LinchpinError("The protocol speficied is not supported")


        fetch_class = FETCH_CLASS[fetch_protocol](self.ctx, fetch_dir, src,
                                                  dest, cache_path, root)
        fetch_class.fetch_files()
        fetch_class.copy_files()
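
To illustrate how the protocol_regex table above routes a source URL to a fetch class, here is a small sketch; the sample URLs are made up, but the patterns are the ones used in lp_fetch.

import re
from collections import OrderedDict

protocol_regex = OrderedDict([
    (r'((git|ssh|http(s)?)|(git@[\w\.]+))'
     r'(:(//)?)([\w\.@\:/\-~]+)(\.git)(/)?', 'FetchGit'),
    (r'^(http|https)://', 'FetchHttp'),
    (r'^(file)://', 'FetchLocal'),
])

# hypothetical source URLs
for src in ('git@example.com:team/workspace.git',
            'https://example.com/workspaces/ws1/',
            'file:///tmp/my-workspace'):
    fetch_protocol = next((cls for regex, cls in protocol_regex.items()
                           if re.match(regex, src)), None)
    print('{0} -> {1}'.format(src, fetch_protocol))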
Example #17
def yaml2json(pf):

    """ parses yaml file into json object """

    with open(pf, 'r') as stream:
        try:
            pf = yaml.safe_load(stream)
            return pf
        except yaml.YAMLError as exc:
            raise LinchpinError(exc)
Example #18
    def copy_dir(self, src, dest):

        for root, dirs, files in os.walk(src):
            files = [f for f in files if not f[0] == '.']
            dirs[:] = [d for d in dirs if not d[0] == '.']
            if not os.path.isdir(root):
                os.makedirs(root)
            for f in files:
                rel_path = root.replace(src, '').lstrip(os.sep)
                dest_path = os.path.join(dest, rel_path)

                if not os.path.isdir(dest_path):
                    try:
                        os.makedirs(dest_path)
                    except OSError as e:
                        if e.errno == 17:
                            raise LinchpinError('File {0} already exists'
                                                ' in destination directory'
                                                .format(e.filename))

                s_file = os.path.join(root, f)
                d_file = os.path.join(dest_path, f)

                # the fetch.cache_ws flag determines whether or not to reuse
                # cached files. It can be overridden on the cli with --nocache.
                cache_ws = (ast.literal_eval(
                            self.ctx.get_cfg('fetch', 'cache_ws',
                                             default='True')))

                cp_files = False
                if not cache_ws:
                    cp_files = True
                else:
                    if not os.path.exists(d_file):
                        cp_files = True
                    else:
                        cache_days = int(self.ctx.get_cfg('fetch',
                                                          'cache_days',
                                                          default=1))
                        s_file_mtime = int(os.stat(s_file).st_mtime)
                        d_file_mtime = int(os.stat(d_file).st_mtime)

                        # cache_days is a number of days; mtimes are seconds
                        cache_secs = cache_days * 86400
                        if (s_file_mtime - d_file_mtime) >= cache_secs:
                            cp_files = True

                if cp_files:

                    try:
                        if (os.path.islink(s_file) and
                                os.path.exists(os.readlink(s_file))):
                            s_file = os.readlink(s_file)

                        shutil.copy2(s_file, d_file)
                    except (IOError, OSError) as e:
                        self.ctx.log_state(e)
Example #19
    def set_flag_no_hooks(self, flag):
        """
        set_flag_no_hooks sets the no_hooks flag

        :param flag: boolean

        returns boolean
        """

        if isinstance(flag, bool):
            return self.context.set_cfg("hookflags", "no_hooks", flag)
        raise LinchpinError("Incorrect datatype please use boolean")
Example #20
    def _invoke_playbooks(self,
                          resources={},
                          action='up',
                          console=True,
                          providers=[]):
        """
        Uses the Ansible API code to invoke the specified linchpin playbook

        :param resources: dict of resources to provision
        :param action: Which ansible action to run (default: 'up')
        :param console: Whether to display the ansible console (default: True)
        """

        return_code = 0
        results = []

        self.set_evar('_action', action)
        self.set_evar('state', 'present')

        if action == 'setup' or action == 'ask_sudo_setup':
            self.set_evar('setup_providers', providers)
            return_code, res = self._find_n_run_pb(action,
                                                   "localhost",
                                                   console=console)
            if res:
                results.append(res)
            if not len(results):
                results = None
            return (return_code, results)

        if action == 'destroy':
            self.set_evar('state', 'absent')

        inventory_src = '{0}/localhost'.format(self.workspace)

        for resource in resources:
            self.set_evar('resources', resource)
            playbook = resource.get('resource_group_type')
            return_code, res = self._find_n_run_pb(playbook,
                                                   inventory_src,
                                                   console=console)
            if action == "up" and return_code > 0:
                raise LinchpinError("Unsuccessful provision of resource "
                                    "System return: {0}".format(return_code))

            if res:
                results.append(res)

        if not len(results):
            results = None

        return (return_code, results)
Example #21
    def set_flag_ignore_failed_hooks(self, flag):
        """
        set_flag_ignore_failed_hooks sets the current ignore_failed_hooks
        flag value

        :param flag: boolean

        """

        if isinstance(flag, bool):
            return self.context.set_cfg("hookflags",
                                        "ignore_failed_hooks",
                                        flag)
        raise LinchpinError("Incorrect datatype please use boolean")
Example #22
    def set_vault_encryption(self, vault_enc):
        """
        set_vault_encryption sets the vault_encryption flag, indicating
        whether the credentials at the current credentials path are
        encrypted with Ansible Vault

        :param vault_enc: boolean

        returns boolean
        """

        if isinstance(vault_enc, bool):
            return self.context.set_evar("vault_encryption", vault_enc)
        raise LinchpinError("Incorrect datatype please use boolean")
Example #23
    def call_clone(self, fetch_dir=None):
        if fetch_dir:
            retval = subprocess.call(
                ['git', '-C', fetch_dir, 'pull', '--quiet'])
            tempdir = fetch_dir
        else:
            tempdir = tempfile.mkdtemp(prefix="git_", dir=self.cache_dir)
            retval = subprocess.call(
                ['git', 'clone', '--quiet', self.src, tempdir])

        if retval != 0:
            raise LinchpinError("Unable to clone {0}".format(self.src))
        return tempdir
Example #24
    def set_credentials_path(self, creds_path):
        """
        set_credentials_path sets the credentials path

        :param creds_path: path to the credentials directory

        returns True/False
        """

        if os.path.isdir(creds_path):
            return self.context.set_evar("default_credentials_path",
                                         creds_path)
        raise LinchpinError("Incorrect file path, path should be a directory")
Example #25
    def _convert_topology(self, topology):
        """
        For backward compatibility, convert the old topology format
        into the new format. Should be pretty straightforward and simple.

        :param topology: topology dictionary
        """
        try:
            res_grps = topology.get('resource_groups')
            if res_grps:
                for res_grp in res_grps:
                    if 'res_group_type' in res_grp.keys():
                        res_grp['resource_group_type'] = (
                            res_grp.pop('res_group_type'))

                    if 'res_defs' in res_grp.keys():
                        res_grp['resource_definitions'] = (
                            res_grp.pop('res_defs'))

                    res_defs = res_grp.get('resource_definitions')
                    if not res_defs:
                        # this means it's either a beaker or openshift topology
                        res_grp_type = res_grp.get('resource_group_type')

                        res_group = self._fix_broken_topologies(
                            res_grp, res_grp_type)
                        res_defs = res_group.get('resource_definitions')
                        res_grp['resource_definitions'] = res_defs

                    if res_defs:
                        for res_def in res_defs:
                            if 'res_name' in res_def.keys():
                                res_def['name'] = res_def.pop('res_name')
                            if 'type' in res_def.keys():
                                res_def['role'] = res_def.pop('type')
                            if 'res_type' in res_def.keys():
                                res_def['role'] = res_def.pop('res_type')
                            if 'count' in res_def.keys():
                                res_def['count'] = int(res_def.pop('count'))
                    else:
                        raise TopologyError("'resource_definitions' do not"
                                            " validate in topology"
                                            " ({0})".format(topology))
            else:
                raise TopologyError("'resource_groups' do not validate"
                                    " in topology ({0})".format(topology))

        except TopologyError:
            raise
        except Exception:
            raise LinchpinError("Unknown error converting schema. Check"
                                " template data")
Example #26
    def parse_json_yaml(self, data):
        """ parses yaml file into json object """

        d = None

        try:
            data = yaml.load(data, Loader=yamlordereddictloader.Loader)
        except Exception as e:
            raise LinchpinError('YAML parsing error: {}'.format(e))

        if isinstance(data, dict):
            return data

        return d
Example #27
    def transfer_section(self, section):
        dest_dir = os.path.join(self.dest, section)
        dir_exists = True
        if section not in os.listdir(self.dest):
            dir_exists = False
            os.makedirs(dest_dir)

        src_dir = os.path.join('{0}/{1}'.format(self.td, self.root), section)
        if not os.path.exists(src_dir):
            if not dir_exists:
                shutil.rmtree(dest_dir)
            raise LinchpinError('The {0} directory does not exist in '
                                '{1}'.format(self.fetch_type, self.src))
        self.copy_dir(src_dir, dest_dir)
Example #28
    def _get_role(self, role):
        for path in self.role_path:
            p = '{0}/{1}'.format(path, role)

            if os.path.exists(os.path.expanduser(p)):
                return

        # if the role is not in role_path, ansible-galaxy may be able to
        # install it (if it has not already been installed).
        # galaxy_runner.install() returns True if the role was successfully
        # installed or was previously installed, and False otherwise.
        if not galaxy_runner.install(role):
            raise LinchpinError("role '{0}' not found in path: {1}\n. It also"
                                " could not be installed via Ansible"
                                " Galaxy".format(role, self.role_path))
Example #29
    def _find_playbook_path(self, playbook):
        """
        returns the full path to a given playbook

        :param playbook: name of the playbook
        """

        for path in self.pb_path:
            p = '{0}/{1}{2}'.format(path, playbook, self.pb_ext)

            if os.path.exists(os.path.expanduser(p)):
                return path

        raise LinchpinError("playbook '{0}' not found in"
                            " path: {1}".format(playbook, self.pb_path))
Example #30
    def _validate_topology(self, topology):
        """
        Validate the provided topology against the schema

        :param topology: topology dictionary
        """

        res_grps = topology.get('resource_groups')
        resources = []

        for group in res_grps:
            res_grp_type = (group.get('resource_group_type')
                            or group.get('res_group_type'))

            pb_path = self._find_playbook_path(res_grp_type)

            try:
                sp = "{0}/roles/{1}/files/schema.json".format(
                    pb_path, res_grp_type)
                schema = json.load(open(sp))
            except Exception as e:
                raise LinchpinError("Error with schema: '{0}'"
                                    " {1}".format(sp, e))

            res_defs = group.get('resource_definitions')

            # preload this so it will validate against the schema
            document = {'res_defs': res_defs}
            v = AnyofValidator(schema, error_handler=ValidationErrorHandler)

            if not v.validate(document):
                try:
                    err = self._gen_error_msg("", "", v.errors)
                    raise SchemaError(err)
                except NotImplementedError as e:
                    # we shouldn't have this issue using cerberus >= 1.2, but
                    # this is here just in case an older version has to be used
                    print("There was an error validating your schema, but we\
                          can't seem to format it for you")
                    print("Here's the raw error data in case you want to go\
                          through it by hand:")
                    print(v._errors)

            resources.append(group)

        return resources