Example #1
0
    def _node_data_get_context_section(self, context):
        """
        Create the cloud-init config file for this node and return its path.

        Selects the cloud-config source based on *context*:
        default config when context/cloud_config is missing, otherwise the
        TOSCA-provided config inserted into, appended to, or overwriting the
        default. Writes the result to a temporary file next to the final path.

        :param context: dict-like TOSCA context section, or None
        :return: path of the (final, non-.tmp) cloud-init file
        """
        # BUG FIX: the original called context.get("cloud_config") before the
        # `not context` guard, so a None context raised AttributeError.
        if not context:
            logger.debug("The adaptor will use a default cloud-config")
            node_init = self._get_cloud_init(None)
        else:
            cloud_config = context.get("cloud_config")
            if not cloud_config:
                logger.debug(
                    "No cloud-config provided... using default cloud-config")
                node_init = self._get_cloud_init(None)
            elif context.get("insert"):
                logger.debug("Insert the TOSCA cloud-config in the default config")
                node_init = self._get_cloud_init(cloud_config, "insert")
            elif context.get("append"):
                logger.debug("Append the TOSCA cloud-config to the default config")
                node_init = self._get_cloud_init(cloud_config, "append")
            else:
                logger.debug("Overwrite the default cloud-config")
                node_init = self._get_cloud_init(cloud_config, "overwrite")

        cloud_init_file_name = "{}-cloud-init.yaml".format(self.node_name)
        cloud_init_path = "{}{}".format(self.volume, cloud_init_file_name)
        cloud_init_path_tmp = "{}.tmp".format(cloud_init_path)

        # Dump to the .tmp file; the returned path is the final destination
        # (presumably another step promotes .tmp -> final — confirm in caller).
        utils.dump_order_yaml(node_init, cloud_init_path_tmp)
        return cloud_init_path
Example #2
0
    def _get_infra_def(self, tmp):
        """Read the infra definition and apply min/max instance counts from
        the TOSCA policies.

        If the template has no (valid) policy section, the adaptor's default
        min/max values are used. The updated definition is dumped to the
        temporary or final output path depending on *tmp* / validation mode.
        """
        node_infra = {
            'name': self.node_name,
            'type': self.node_name,
            'scaling': {'min': self.min_instances, 'max': self.max_instances},
        }

        # Prefer an already-generated output file; fall back to the input.
        if tmp:
            path = (self.infra_def_path_output_tmp
                    if os.path.isfile(self.infra_def_path_output_tmp)
                    else self.infra_def_path_input)
        else:
            path = (self.infra_def_path_output
                    if os.path.isfile(self.infra_def_path_output)
                    else self.infra_def_path_input)

        if tmp or self.validate is False:
            infra_def = {}
            try:
                infra_def = utils.get_yaml_data(path)
                infra_def.setdefault('nodes', [])
                infra_def["nodes"].append(node_infra)
            except OSError as e:
                # Best-effort: log and continue with whatever we have so far.
                logger.error(e)

            if tmp:
                utils.dump_order_yaml(infra_def,
                                      self.infra_def_path_output_tmp)
            elif self.validate is False:
                utils.dump_order_yaml(infra_def, self.infra_def_path_output)
Example #3
0
    def translate(self, tmp=False):
        """
        Translate the self.tpl subset to Occopus node definition and infrastructure format
        The adaptor create a mapping between TOSCA and Occopus template descriptor.
        """
        self.node_def = {}
        logger.info("Starting OccoTranslation")
        self.status = "translating"

        for node in self.template.nodetemplates:
            # Occopus cannot handle underscores in node names.
            if '_' in node.name:
                raise AdaptorCritical("Underscores in node {} not allowed".format(node.name))
            self.node_name = node.name
            self.node_data = {}

            # Work on a copy so the original template stays untouched.
            node = copy.deepcopy(node)
            occo_interface = self._node_data_get_interface(node)
            if not occo_interface:
                continue

            self._node_resolve_interface_data(node, occo_interface, "resource")
            cloud_type = utils.get_cloud_type(node, SUPPORTED_CLOUDS)

            # Per-cloud dispatch: (log message, property extractor).
            cloud_handlers = {
                "cloudsigma": ("CloudSigma resource detected",
                               self._node_data_get_cloudsigma_host_properties),
                "ec2": ("EC2 resource detected",
                        self._node_data_get_ec2_host_properties),
                "cloudbroker": ("CloudBroker resource detected",
                                self._node_data_get_cloudbroker_host_properties),
                "nova": ("Nova resource detected",
                         self._node_data_get_nova_host_properties),
            }
            if cloud_type in cloud_handlers:
                message, extract_properties = cloud_handlers[cloud_type]
                logger.info(message)
                extract_properties(node, "resource")

            self._get_policies(node)
            self._get_infra_def(tmp)

            node_type = self.node_prefix + self.node_name
            self.node_def.setdefault(node_type, []).append(self.node_data)

        if self.node_def:
            if tmp:
                utils.dump_order_yaml(self.node_def, self.node_path_tmp)
            elif self.validate is False:
                self.prepare_auth_file()
                utils.dump_order_yaml(self.node_def, self.node_path)

        self.status = "translated"
Example #4
0
    def prepare_auth_file(self):
        """Prepare the Occopus auth file.

        Pulls the base64-encoded auth data out of the secret, rewrites the
        Nova/OpenStack authentication entries, and — if anything changed —
        writes the re-encoded data back to the secret and waits for the
        mounted volume to pick up the update.

        :raises AdaptorCritical: if the auth-data secret cannot be loaded
        """
        # Pull the auth data out of the secret
        changes = {}
        try:
            auth_secret = self.load_auth_data_secret()
        except FileNotFoundError:
            logger.error("Auth data not found")
            raise AdaptorCritical
        auth_data = auth_secret.obj["data"]
        # NOTE(review): a missing key yields {} here, which would fail on
        # .encode() below — presumably the key is always present; confirm.
        auth_file = auth_data.get("auth_data.yaml", {})
        # BUG FIX: base64.decodestring/encodestring were removed in
        # Python 3.9; decodebytes/encodebytes are the exact modern aliases.
        auth_file = base64.decodebytes(auth_file.encode())
        auth_file = utils.get_yaml_data(auth_file, stream=True)

        # Modify the auth data
        for resource in auth_file.get("resource", []):
            if resource.get("type") == "nova":
                auth = resource.get("auth_data", {})
                self.modify_openstack_authentication(auth, changes)

        # Update the secret with the modified auth data
        if changes:
            new_auth_data = utils.dump_order_yaml(auth_file).encode()
            new_auth_data = base64.encodebytes(new_auth_data)
            auth_data["auth_data.yaml"] = new_auth_data.decode()
            auth_secret.update()
            self.wait_for_volume_update(changes)
Example #5
0
    def save_template(self, adt):
        """Persist *adt* to ``self.path`` and return that path.

        Dict input is dumped as ordered YAML; anything else must expose a
        ``save(path)`` method (a YAML file object). Aborts the request on
        missing data, an invalid target directory, or an unsupported type.
        """
        if not adt:
            abort(400, "No ADT data was included in the request")

        target_dir = os.path.dirname(self.path)
        if not os.path.exists(target_dir):
            abort(500, f"Path {self.path} is not valid")

        if isinstance(adt, dict):
            utils.dump_order_yaml(adt, self.path)
            return self.path

        try:
            adt.save(self.path)
        except AttributeError:
            abort(400, "ADT data must be YAML file or dict")
        return self.path
 def _save_file(self, id_app, path):
     """Dump the template currently being treated into the
     files/templates directory, using the application ID as the file name.
     """
     template_data = utils.get_yaml_data(path)
     target_path = "files/templates/{}.yaml".format(id_app)
     utils.dump_order_yaml(template_data, target_path)