Ejemplo n.º 1
0
    def write_role_manifest_vars(self, ansible_dir, role, kind):
        """Render the 'manifest.yml' vars file for *role* when *kind* is supported."""
        supported_kinds = {
            "configuration/haproxy", "configuration/node-exporter"
        }
        if kind not in supported_kinds:
            return  # skip

        try:
            cluster_model = select_single(
                self.manifest_docs, lambda doc: doc.kind == 'epiphany-cluster')
        except ExpectedSingleResultException:
            return  # skip

        manifest_doc = select_first(self.manifest_docs, lambda doc: doc.kind == kind)
        if manifest_doc is None:
            # No user-provided document: fall back to the bundled defaults
            manifest_doc = load_yaml_obj(types.DEFAULT, 'common', kind)
            # Defaults lack the mandatory "version" attribute, so inject it
            manifest_doc['version'] = VERSION

        # The provider always comes from the cluster model
        manifest_doc['provider'] = cluster_model['provider']

        # Fill any missing attributes from the defaults
        with DefaultMerger([manifest_doc]) as merger:
            manifest_doc = merger.run()[0]

        self.write_role_vars(ansible_dir, role, manifest_doc,
                             vars_file_name='manifest.yml')
Ejemplo n.º 2
0
    def populate_group_vars(self, ansible_dir):
        """Dump shared cluster settings into <ansible_dir>/group_vars/all.yml."""
        group_vars = ObjDict()
        group_vars['admin_user'] = self.cluster_model.specification.admin_user
        group_vars['validate_certs'] = Config().validate_certs
        group_vars['offline_requirements'] = Config().offline_requirements
        group_vars['wait_for_pods'] = Config().wait_for_pods
        group_vars['is_upgrade_run'] = self.is_upgrade_run
        group_vars['roles_with_generated_vars'] = sorted(
            self.roles_with_generated_vars)

        # During an upgrade the shared config comes from the stored manifest;
        # otherwise it comes from the user-supplied configuration documents.
        if self.is_upgrade_run:
            shared_config = self.get_shared_config_from_manifest()
        else:
            shared_config = select_first(
                self.config_docs,
                lambda doc: doc.kind == 'configuration/shared-config')

        if shared_config is None:
            # Nothing provided anywhere: fall back to the bundled defaults
            shared_config = load_yaml_obj(types.DEFAULT, 'common',
                                          'configuration/shared-config')

        self.set_vault_path(shared_config)
        group_vars.update(shared_config.specification)

        target_dir = os.path.join(ansible_dir, 'group_vars')
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)

        target_path = os.path.join(target_dir, 'all.yml')

        # NOTE(review): mode 'a' appends on repeated runs — presumably
        # intentional (multi-pass generation); verify against callers.
        with open(target_path, 'a') as stream:
            dump(group_vars, stream)
Ejemplo n.º 3
0
    def run(self):
        """Assemble the effective list of configuration documents.

        First guarantees every document in ``REQUIRED_DOCS`` is present
        (loading defaults for missing ones), then adds one configuration
        document per feature of every enabled component.

        Returns:
            list: the collected configuration documents, each stamped with
            the current ``VERSION``.
        """
        configuration_docs = []

        def append_config(doc):
            # Every emitted document carries the current schema version
            doc['version'] = VERSION
            configuration_docs.append(doc)

        # Ensure all mandatory documents are present; both branches of the
        # original appended, so append once after the fallback.
        for document_kind in ConfigurationAppender.REQUIRED_DOCS:
            doc = select_first(self.input_docs, lambda x: x.kind == document_kind)
            if doc is None:
                doc = load_yaml_obj(types.DEFAULT, 'common', document_kind)
                self.logger.info("Adding: " + doc.kind)
            append_config(doc)

        # The feature mapping is appended by the loop above and never changes
        # per component, so look it up once instead of once per component.
        features_map = select_first(
            configuration_docs,
            lambda x: x.kind == 'configuration/feature-mapping')

        for component_key, component_value in self.cluster_model.specification.components.items():
            if component_value.count < 1:
                continue

            config_selector = component_value.configuration
            for feature_key in features_map.specification.roles_mapping[component_key]:
                config = select_first(
                    self.input_docs,
                    lambda x: x.kind == 'configuration/' + feature_key and x.name == config_selector)
                if config is not None:
                    # User supplied an explicit config for this feature
                    append_config(config)
                elif select_first(
                        configuration_docs,
                        lambda x: x.kind == 'configuration/' + feature_key and x.name == config_selector) is None:
                    # Not collected yet either: merge user input with defaults
                    config = merge_with_defaults('common', 'configuration/' + feature_key, config_selector)
                    self.logger.info("Adding: " + config.kind)
                    append_config(config)

        return configuration_docs
Ejemplo n.º 4
0
    def __init__(self, cluster_model, validation_docs):
        """Prepare the base validation schemas for *cluster_model*'s provider."""
        super().__init__(__name__)
        self.cluster_model = cluster_model
        self.validation_docs = validation_docs

        provider = self.cluster_model.provider
        base = load_yaml_obj(types.VALIDATION, provider, 'core/base')
        self.definitions = load_yaml_obj(types.VALIDATION, provider,
                                         'core/definitions')

        # Full schema: the provider base plus the shared definitions
        self.base_schema = dict_to_objdict(deepcopy(base))
        self.base_schema['definitions'] = self.definitions

        # Variant with the 'provider' requirement stripped out, for documents
        # that are validated without provider information
        self.base_schema_no_provider = dict_to_objdict(deepcopy(base))
        self.base_schema_no_provider['definitions'] = self.definitions
        del self.base_schema_no_provider.required[0]
        del self.base_schema_no_provider.properties['provider']
Ejemplo n.º 5
0
 def run(self):
     """Validate every document against its provider-specific schema.

     Logs a warning for documents whose specification is explicitly
     unvalidated; otherwise lets ``validate`` raise on schema violations.
     """
     for doc in self.validation_docs:
         self.logger.info('Validating: ' + doc.kind)
         schema = self.get_base_schema(doc.kind)
         schema['specification'] = load_yaml_obj(
             types.VALIDATION, self.cluster_model.provider, doc.kind)
         # Guard the '$ref' lookup: specs with a real schema carry no '$ref'
         # key, so the bare subscript would raise (same guard as the sibling
         # validators use).
         if hasattr(schema["specification"], '$ref'):
             if schema["specification"][
                     '$ref'] == '#/definitions/unvalidated_specification':
                 self.logger.warn('No specification validation for ' + doc.kind)
         validate(instance=objdict_to_dict(doc),
                  schema=objdict_to_dict(schema))
Ejemplo n.º 6
0
 def run(self):
     """Validate every document, logging and re-raising on schema errors.

     Raises:
         Exception: wrapping the underlying validation error (chained via
             ``from`` so the root cause stays in the traceback).
     """
     for doc in self.validation_docs:
         self.logger.info(f'Validating: {doc.kind}')
         schema = self.get_base_schema(doc.kind)
         schema['properties']['specification'] = load_yaml_obj(
             types.VALIDATION, self.cluster_model.provider, doc.kind)
         # '$ref' to unvalidated_specification marks a deliberately
         # unvalidated document — warn rather than fail.
         if hasattr(schema['properties']["specification"], '$ref'):
             if schema['properties']["specification"][
                     '$ref'] == '#/definitions/unvalidated_specification':
                 self.logger.warn('No specification validation for ' +
                                  doc.kind)
         try:
             validate(instance=objdict_to_dict(doc),
                      schema=objdict_to_dict(schema))
         except Exception as e:
             self.logger.error(f'Failed validating: {doc.kind}')
             self.logger.error(e)
             # Chain the original error so the traceback keeps the root cause
             raise Exception(
                 'Schema validation error, see the error above.') from e
Ejemplo n.º 7
0
    def run(self):
        """Collect per-feature configuration documents for enabled components."""
        configuration_docs = []

        components = self.cluster_model.specification.components
        for component_key, component_value in components.items():
            if component_value.count < 1:
                continue

            # Feature mapping lookup order: user input, already-collected
            # docs, finally the bundled default (which is also collected).
            features_map = select_first(
                self.input_docs,
                lambda x: x.kind == 'configuration/feature-mapping')
            if features_map is None:
                features_map = select_first(
                    configuration_docs,
                    lambda x: x.kind == 'configuration/feature-mapping')
            if features_map is None:
                features_map = load_yaml_obj(types.DEFAULT, 'common',
                                             'configuration/feature-mapping')
                self.logger.info("Adding: " + features_map.kind)
                configuration_docs.append(features_map)

            config_selector = component_value.configuration
            for feature_key in features_map.specification.roles_mapping[component_key]:
                feature_kind = 'configuration/' + feature_key
                # Same lookup order for the feature config itself
                config = select_first(
                    self.input_docs,
                    lambda x: x.kind == feature_kind and x.name == config_selector)
                if config is None:
                    config = select_first(
                        configuration_docs,
                        lambda x: x.kind == feature_kind and x.name == config_selector)
                if config is None:
                    config = merge_with_defaults('common', feature_kind,
                                                 config_selector)
                    self.logger.info("Adding: " + config.kind)
                    configuration_docs.append(config)

        return configuration_docs
Ejemplo n.º 8
0
    def populate_group_vars(self, ansible_dir):
        """Write common settings to <ansible_dir>/group_vars/all.yml.

        Builds the main vars dict from the cluster model and the shared
        config document (falling back to the bundled defaults), then dumps
        it to the group-vars file.
        """
        main_vars = ObjDict()
        main_vars['admin_user'] = self.cluster_model.specification.admin_user
        main_vars['validate_certs'] = Config().validate_certs
        main_vars['offline_requirements'] = Config().offline_requirements
        main_vars['wait_for_pods'] = Config().wait_for_pods

        shared_config_doc = select_first(self.config_docs, lambda x: x.kind == 'configuration/shared-config')
        # Use 'is None' for the None check (PEP 8), not '== None'
        if shared_config_doc is None:
            shared_config_doc = load_yaml_obj(types.DEFAULT, 'common', 'configuration/shared-config')
        main_vars.update(shared_config_doc.specification)

        vars_dir = os.path.join(ansible_dir, 'group_vars')
        if not os.path.exists(vars_dir):
            os.makedirs(vars_dir)

        vars_file_name = 'all.yml'
        vars_file_path = os.path.join(vars_dir, vars_file_name)

        # NOTE(review): append mode accumulates across runs — verify intended
        with open(vars_file_path, 'a') as stream:
            dump(main_vars, stream)
Ejemplo n.º 9
0
    def run_for_individual_documents(self):
        """Validate each document against its own standalone schema.

        Raises:
            Exception: wrapping the underlying validation error (chained
                via ``from`` so the root cause stays in the traceback).
        """
        for doc in self.validation_docs:
            # Load document schema
            schema = load_yaml_obj(types.VALIDATION,
                                   self.cluster_model.provider, doc.kind)

            # Include "definitions"
            schema['definitions'] = self.definitions

            # Warn the user about the missing validation
            if hasattr(schema, '$ref'):
                if schema['$ref'] == '#/definitions/unvalidated_specification':
                    self.logger.warn('No specification validation for ' +
                                     doc.kind)

            # Assert the schema
            try:
                validate(instance=objdict_to_dict(doc),
                         schema=objdict_to_dict(schema))
            except Exception as e:
                self.logger.error(f'Failed validating: {doc.kind}')
                self.logger.error(e)
                # Chain so the original validation error is not lost
                raise Exception(
                    'Schema validation error, see the error above.') from e
Ejemplo n.º 10
0
    def _process_configuration_docs(self):
        """Populate input yaml documents with additional required ad-hoc data."""

        # Work on a private deep copy of the user input
        self.configuration_docs = copy.deepcopy(self.input_docs)

        # DefaultMerger has already run at this point, so missing documents
        # are inserted from the defaults without another merge pass.
        for kind in {'configuration/backup', 'configuration/recovery'}:
            try:
                # A single matching document must exist in the user input
                document = select_single(self.configuration_docs,
                                         lambda x: x.kind == kind)
            except ExpectedSingleResultException:
                # Nothing supplied by the user: insert the default document
                document = load_yaml_obj(data_types.DEFAULT, 'common', kind)
                document['version'] = VERSION  # mandatory attribute
                # Provider comes from the cluster model
                document['provider'] = self.cluster_model.provider
                self.configuration_docs.append(document)
            finally:
                # In both cases mirror the provider into the specification
                document.specification['provider'] = document['provider']
Ejemplo n.º 11
0
 def get_config_or_default(docs, kind):
     """Return the first document of *kind* from *docs*, or the Azure default.

     The default document gets the current VERSION stamped on it.
     """
     config = select_first(docs, lambda doc: doc.kind == kind)
     if config is not None:
         return config
     default_config = load_yaml_obj(types.DEFAULT, 'azure', kind)
     default_config['version'] = VERSION
     return default_config
Ejemplo n.º 12
0
    def upgrade(self):
        """Upgrade a backed-up Ansible inventory/build to the current layout.

        Loads the backup inventory, converts it into the templating
        structure, rebuilds the cluster model (from the inventory for
        pre-0.3.0 builds, from the manifest otherwise), merges it with the
        newer defaults, ensures the repository roles exist and saves the
        new inventory.

        Returns:
            int: 0 on success.

        Raises:
            Exception: when no kubernetes_master host is available to act
                as the repository node.
        """
        inventory_path = get_inventory_path_for_build(self.backup_build_dir)
        build_version = check_build_output_version(self.backup_build_dir)

        self.logger.info(f'Loading backup Ansible inventory: {inventory_path}')
        loaded_inventory = InventoryManager(loader=DataLoader(),
                                            sources=inventory_path)

        # Move the loaded inventory into the templating structure, skipping
        # Ansible's implicit 'all' and 'ungrouped' groups
        new_inventory = []
        for key in loaded_inventory.groups:
            if key != 'all' and key != 'ungrouped':
                group_hosts = loaded_inventory.groups[key].hosts
                new_hosts = []
                for host in group_hosts:
                    new_hosts.append(
                        AnsibleHostModel(host.address,
                                         host.vars['ansible_host']))
                new_inventory.append(AnsibleInventoryItem(key, new_hosts))

        if build_version == BUILD_LEGACY:
            self.logger.info('Upgrading Ansible inventory Epiphany < 0.3.0')

            # Epiphany < 0.3.0 had no manifest file in the build folder, so
            # create a bare-minimum cluster model from the inventory
            self.cluster_model = dict_to_objdict({
                'provider': 'any',
                'specification': {
                    'admin_user': {
                        'name':
                        loaded_inventory.groups['all'].vars['ansible_user'],
                        'key_path':
                        loaded_inventory.groups['all'].
                        vars['ansible_ssh_private_key_file']
                    }
                }
            })

            # Remap legacy role names to their current equivalents
            self.rename_role(new_inventory, 'master', 'kubernetes_master')
            self.rename_role(new_inventory, 'worker', 'kubernetes_node')
            self.rename_role(new_inventory, 'deployments', 'applications')
            self.rename_role(new_inventory, 'elasticsearch-curator',
                             'elasticsearch_curator')
            self.rename_role(new_inventory, 'jmx-exporter', 'jmx_exporter')
            self.rename_role(new_inventory, 'kafka-exporter', 'kafka_exporter')
            self.rename_role(new_inventory, 'haproxy_tls_termination',
                             'haproxy')

            # Remove linux and reboot roles if present
            self.delete_role(new_inventory, 'linux')
            self.delete_role(new_inventory, 'reboot')
        else:
            self.logger.info('Upgrading Ansible inventory Epiphany => 0.3.0')

            # Load the cluster model from the manifest
            self.manifest_docs = load_manifest_docs(self.backup_build_dir)
            self.cluster_model = select_single(
                self.manifest_docs, lambda x: x.kind == 'epiphany-cluster')

        # Merge manifest cluster config with newer defaults
        default_cluster_model = load_yaml_obj(data_types.DEFAULT, 'common',
                                              'epiphany-cluster')
        merge_objdict(default_cluster_model, self.cluster_model)
        self.cluster_model = default_cluster_model

        # Check if repo roles are present and if not add them
        # ('is None' per PEP 8, not '== None')
        master = self.get_role(new_inventory, 'kubernetes_master')
        if master is None:
            raise Exception('No kubernetes_master to use as repository')
        master_node = master.hosts[0]

        # Add image_registry on the first master if missing
        image_registry = self.get_role(new_inventory, 'image_registry')
        if image_registry is None:
            hosts = []
            hosts.append(AnsibleHostModel(master_node.name, master_node.ip))
            new_inventory.append(AnsibleInventoryItem('image_registry', hosts))

        # Add repository on the first master if missing
        repository = self.get_role(new_inventory, 'repository')
        if repository is None:
            hosts = []
            hosts.append(AnsibleHostModel(master_node.name, master_node.ip))
            new_inventory.append(AnsibleInventoryItem('repository', hosts))

        # Save new inventory
        save_inventory(new_inventory, self.cluster_model, self.build_dir)

        return 0
Ejemplo n.º 13
0
 def get_config_or_default(docs, kind):
     """Return the first document of *kind* from *docs*, or the AWS default."""
     found = select_first(docs, lambda doc: doc.kind == kind)
     return load_yaml_obj(types.DEFAULT, 'aws', kind) if found is None else found