Example 1
 def post(self):
     """
     Add a new EMS. The request must provide the ems details.
     used by: `katana ems add -f [yaml file]`
     """
     # TODO: Test connectivity with the EMS
     new_uuid = str(uuid.uuid4())
     # Create the object and store it in the object collection
     try:
         ems_id = request.json["id"]
         if request.json["type"] == "amarisoft-ems":
             ems = amar_emsUtils.Ems(request.json["url"])
         elif request.json["type"] == "test-ems":
             ems = test_emsUtils.Ems(request.json["url"])
         else:
             return "Error: Not supported EMS type", 400
     except KeyError:
         return f"Error: Required fields: {self.req_fields}", 400
     thebytes = pickle.dumps(ems)
     obj_json = {
         "_id": new_uuid,
         "id": request.json["id"],
         "obj": Binary(thebytes)
     }
     request.json["_id"] = new_uuid
     request.json["created_at"] = time.time()  # unix epoch
     try:
         new_uuid = mongoUtils.add("ems", request.json)
     except pymongo.errors.DuplicateKeyError:
         return f"EMS with id {ems_id} already exists", 400
     mongoUtils.add("ems_obj", obj_json)
     return f"Created {new_uuid}", 201
Example 2
 def post(self):
     """
     Add a new policy management system. The request must provide the
      system details. used by: `katana policy add -f [yaml file]`
     """
     # Create the object and store it in the object collection
     try:
         if request.json["type"] == "test-policy":
             policy = test_policyUtils.Policy(id=request.json["id"], url=request.json["url"])
         elif request.json["type"] == "neat":
             policy = neatUtils.Policy(id=request.json["id"], url=request.json["url"])
         else:
             return "Error: Not supported Policy system type", 400
     except KeyError:
         return f"Error: Required fields: {self.req_fields}", 400
     new_uuid = str(uuid.uuid4())
     request.json["_id"] = new_uuid
     request.json["created_at"] = time.time()  # unix epoch
     try:
         new_uuid = mongoUtils.add("policy", request.json)
     except pymongo.errors.DuplicateKeyError:
         return (
             "Policy management system with id {0} already exists".format(request.json["id"]),
             400,
         )
     # Store the policy object to the mongo db
     thebytes = pickle.dumps(policy)
     obj_json = {"_id": new_uuid, "id": request.json["id"], "obj": Binary(thebytes)}
     mongoUtils.add("policy_obj", obj_json)
     return f"Created {new_uuid}", 201
 def post(self):
     """
     Add a new wim. The request must provide the wim details.
     used by: `katana wim add -f [yaml file]`
     """
     # TODO: Test connectivity with the WIM
     new_uuid = str(uuid.uuid4())
     # Create the object and store it in the object collection
     try:
         wim_id = request.json["id"]
         if request.json["type"] == "odl-wim":
             wim = odl_wimUtils.Wim(request.json['url'])
         elif request.json["type"] == "test-wim":
             wim = test_wimUtils.Wim(request.json['url'])
         else:
             return "Error: Not supported WIM type", 400
     except KeyError:
         return f"Error: Required fields: {self.req_fields}", 400
     thebytes = pickle.dumps(wim)
     obj_json = {
         "_id": new_uuid,
         "id": request.json["id"],
         "obj": Binary(thebytes)
     }
     request.json['_id'] = new_uuid
     request.json['created_at'] = time.time()  # unix epoch
     request.json['slices'] = {}
     try:
         new_uuid = mongoUtils.add('wim', request.json)
     except pymongo.errors.DuplicateKeyError:
         return f"WIM with id {wim_id} already exists", 400
     mongoUtils.add('wim_obj', obj_json)
     return f"Created {new_uuid}", 201
Example 4
    def put(self, uuid):
        """
        Update the details of a specific wim.
        used by: `katana wim update -f [yaml file] [uuid]`
        """
        data = request.json
        data["_id"] = uuid
        old_data = mongoUtils.get("wim", uuid)

        if old_data:
            data["created_at"] = old_data["created_at"]
            data["slices"] = old_data["slices"]
            try:
                for entry in self.req_fields:
                    if data[entry] != old_data[entry]:
                        return "Cannot update field: " + entry, 400
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            else:
                mongoUtils.update("wim", uuid, data)
            return f"Modified {uuid}", 200
        else:
            new_uuid = uuid
            data = request.json
            data["_id"] = new_uuid
            data["created_at"] = time.time()  # unix epoch
            try:
                wim_id = request.json["id"]
                if request.json["type"] == "odl-wim":
                    wim = odl_wimUtils.Wim(request.json["url"])
                elif request.json["type"] == "test-wim":
                    wim = test_wimUtils.Wim(request.json["url"])
                else:
                    return "Error: Not supported WIM type", 400
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            thebytes = pickle.dumps(wim)
            obj_json = {"_id": new_uuid, "id": data["id"], "obj": Binary(thebytes)}
            data["slices"] = {}
            try:
                new_uuid = mongoUtils.add("wim", data)
            except pymongo.errors.DuplicateKeyError:
                return f"WIM with id {wim_id} already exists", 400
            try:
                monitoring_url = request.json["monitoring-url"]
            except KeyError:
                pass
            else:
                with open("/targets/wim_targets.json", mode="r") as prom_file:
                    prom = json.load(prom_file)
                    prom.append({"targets": [monitoring_url], "labels": {"wim_id": wim_id}})
                with open("/targets/wim_targets.json", mode="w") as prom_file:
                    json.dump(prom, prom_file)
            mongoUtils.add("wim_obj", obj_json)
            return f"Created {new_uuid}", 201
    def put(self, uuid):
        """
        Update the details of a specific policy engine system.
        used by: `katana policy update [uuid] -f [yaml file]`
        """
        data = request.json
        data["_id"] = uuid
        old_data = mongoUtils.get("policy", uuid)

        if old_data:
            data["created_at"] = old_data["created_at"]
            try:
                for entry in self.req_fields:
                    if data[entry] != old_data[entry]:
                        return "Cannot update field: " + entry, 400
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            else:
                mongoUtils.update("policy", uuid, data)
            return f"Modified {uuid}", 200
        else:
            # Create the object and store it in the object collection
            try:
                if request.json["type"] == "test-policy":
                    policy = test_policyUtils.Policy(id=request.json["id"],
                                                     url=request.json["url"])
                elif request.json["type"] == "neat":
                    policy = neatUtils.Policy(id=request.json["id"],
                                              url=request.json["url"])
                else:
                    return "Error: Not supported Policy system type", 400
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            # Use the uuid from the request path, since the uuid module is shadowed here
            new_uuid = uuid
            request.json["_id"] = new_uuid
            request.json["created_at"] = time.time()  # unix epoch
            try:
                new_uuid = mongoUtils.add("policy", request.json)
            except pymongo.errors.DuplicateKeyError:
                return (
                    "Policy management system with id {0} already exists".
                    format(request.json["id"]),
                    400,
                )
            # Store the policy object to the mongo db
            thebytes = pickle.dumps(policy)
            obj_json = {
                "_id": new_uuid,
                "id": request.json["id"],
                "obj": Binary(thebytes)
            }
            mongoUtils.add("policy_obj", obj_json)
            return f"Created {new_uuid}", 201
Example 6
    def post(self):
        """
        Add a new nfvo. The request must provide the nfvo details.
        used by: `katana nfvo add -f [yaml file]`
        """
        new_uuid = str(uuid.uuid4())
        request.json["_id"] = new_uuid
        request.json["created_at"] = time.time()  # unix epoch
        request.json["tenants"] = {}

        if request.json["type"] == "OSM":
            # Create the NFVO object
            try:
                osm_username = request.json["nfvousername"]
                osm_password = request.json["nfvopassword"]
                osm_ip = request.json["nfvoip"]
                osm_project_name = request.json["tenantname"]
                nfvo_id = request.json["id"]
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            else:
                osm = osmUtils.Osm(nfvo_id, osm_ip, osm_username, osm_password,
                                   osm_project_name)
            try:
                osm.getToken()
            except ConnectTimeout as e:
                logger.exception("Connection Timeout: {}".format(e))
                response = dumps({"error": "Unable to connect to NFVO"})
                return (response, 400)
            except ConnectionError as e:
                logger.exception("Connection Error: {}".format(e))
                response = dumps({"error": "Unable to connect to NFVO"})
                return (response, 400)
            else:
                # Store the osm object to the mongo db
                thebytes = pickle.dumps(osm)
                obj_json = {
                    "_id": new_uuid,
                    "id": request.json["id"],
                    "obj": Binary(thebytes)
                }
                try:
                    new_uuid = mongoUtils.add("nfvo", request.json)
                except pymongo.errors.DuplicateKeyError:
                    return f"NFVO with id {nfvo_id} already exists", 400
                mongoUtils.add("nfvo_obj", obj_json)
                # Get information regarding VNFDs and NSDs
                osmUtils.bootstrapNfvo(osm)
                return f"Created {new_uuid}", 201
        else:
            response = dumps({"error": "This type nfvo is not supported"})
            return response, 400
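These handlers persist the live driver object (the Osm instance here) by pickling it into a BSON Binary field of a parallel "*_obj" collection, and later code re-hydrates it with pickle.loads to call its methods again. A minimal sketch of that round trip using pymongo directly; the database name and the DriverStub class are assumptions, and the real code goes through the mongoUtils wrapper instead.

import pickle
from bson.binary import Binary
from pymongo import MongoClient

class DriverStub:
    """Stand-in for a driver object such as osmUtils.Osm."""
    def __init__(self, ip):
        self.ip = ip

db = MongoClient("mongodb://localhost:27017")["katana"]  # assumed DB name

# Store: keep the pickled object next to the plain JSON record
driver = DriverStub("10.30.0.10")
db["nfvo_obj"].insert_one(
    {"_id": "some-uuid", "id": "nfvo-1", "obj": Binary(pickle.dumps(driver))}
)

# Load: fetch the document and un-pickle the driver to reuse it
restored = pickle.loads(db["nfvo_obj"].find_one({"id": "nfvo-1"})["obj"])
print(restored.ip)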
Example 7
    def put(self, uuid):
        """
        Update the details of a specific EMS.
        used by: `katana ems update -f [yaml file] [uuid]`
        """
        data = request.json
        data["_id"] = uuid
        old_data = mongoUtils.get("ems", uuid)

        if old_data:
            data["created_at"] = old_data["created_at"]
            try:
                for entry in self.req_fields:
                    if data[entry] != old_data[entry]:
                        return "Cannot update field: " + entry, 400
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            else:
                mongoUtils.update("ems", uuid, data)
            return f"Modified {uuid}", 200
        else:
            new_uuid = uuid
            data = request.json
            data["_id"] = new_uuid
            data["created_at"] = time.time()  # unix epoch
            # Create the object and store it in the object collection
            try:
                ems_id = request.json["id"]
                if request.json["type"] == "amarisoft-ems":
                    ems = amar_emsUtils.Ems(request.json["url"])
                elif request.json["type"] == "test-ems":
                    ems = test_emsUtils.Ems(request.json["url"])
                else:
                    return "Error: Not supported EMS type", 400
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            thebytes = pickle.dumps(ems)
            obj_json = {
                "_id": new_uuid,
                "id": data["id"],
                "obj": Binary(thebytes)
            }
            try:
                new_uuid = mongoUtils.add("ems", data), 201
            except pymongo.errors.DuplicateKeyError:
                return f"EMS with id {ems_id} already exists", 400
            mongoUtils.add("ems_obj", obj_json)
            return f"Created {new_uuid}", 201
    def put(self, uuid):
        """
        Update the details of a specific wim.
        used by: `katana wim update -f [yaml file] [uuid]`
        """
        data = request.json
        data['_id'] = uuid
        old_data = mongoUtils.get("wim", uuid)

        if old_data:
            data["created_at"] = old_data["created_at"]
            data["slices"] = old_data["slices"]
            try:
                for entry in self.req_fields:
                    if data[entry] != old_data[entry]:
                        return "Cannot update field: " + entry, 400
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            else:
                mongoUtils.update("wim", uuid, data)
            return f"Modified {uuid}", 200
        else:
            new_uuid = uuid
            data = request.json
            data['_id'] = new_uuid
            data['created_at'] = time.time()  # unix epoch
            try:
                wim_id = request.json["id"]
                if request.json["type"] == "odl-wim":
                    wim = odl_wimUtils.Wim(request.json['url'])
                elif request.json["type"] == "test-wim":
                    wim = test_wimUtils.Wim(request.json['url'])
                else:
                    return "Error: Not supported WIM type", 400
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            thebytes = pickle.dumps(wim)
            obj_json = {
                "_id": new_uuid,
                "id": data["id"],
                "obj": Binary(thebytes)
            }
            data['slices'] = {}
            try:
                new_uuid = mongoUtils.add('wim', data)
            except pymongo.errors.DuplicateKeyError:
                return f"WIM with id {wim_id} already exists", 400
            mongoUtils.add('wim_obj', obj_json)
            return f"Created {new_uuid}", 201
    def readNsd(self):
        """
        Reads and returns required information from nsd/vnfd
        """
        url = f"https://{self.ip}:9999/osm/nsd/v1/ns_descriptors"
        while True:
            headers = {
                'Content-Type': 'application/yaml',
                'Accept': 'application/json',
                'Authorization': f'Bearer {self.token}',
            }
            response = requests.get(url, headers=headers, verify=False)
         if response.status_code != 401:
                osm_nsd_list = response.json()
                new_nsd = {}
                for osm_nsd in osm_nsd_list:
                    new_nsd["nsd-id"] = osm_nsd["_id"]
                    new_nsd["nsd-name"] = osm_nsd["id"]
                    new_nsd["vnfd_list"] = []
                    new_nsd["flavor"] = {
                        "memory-mb": 0,
                        "vcpu-count": 0,
                        "storage-gb": 0
                    }
                    for osm_vnfd in osm_nsd['constituent-vnfd']:
                        data = {"name": osm_vnfd["vnfd-id-ref"]}
                        reg_vnfd = mongoUtils.find("vnfd", data)
                        if not reg_vnfd:
                            logger.warning("There is a vnfd missing from the \
NFVO repository")
                        else:
                            new_nsd["vnfd_list"].append(reg_vnfd["name"])
                            new_nsd["flavor"]["memory-mb"] +=\
                                reg_vnfd["flavor"]["memory-mb"]
                            new_nsd["flavor"]["vcpu-count"] +=\
                                reg_vnfd["flavor"]["vcpu-count"]
                            new_nsd["flavor"]["storage-gb"] +=\
                                reg_vnfd["flavor"]["storage-gb"]
                    new_nsd["nfvo_id"] = self.nfvo_id
                    new_nsd["_id"] = str(uuid.uuid4())
                    try:
                        mongoUtils.add("nsd", new_nsd)
                    except pymongo.errors.DuplicateKeyError:
                        continue
                    new_nsd = {}
                break
            else:
                self.getToken()
Example 10
 def post(self):
     """
     Add a new wim. The request must provide the wim details.
     used by: `katana wim add -f [file]`
     """
     # TODO: Test connectivity with the WIM
     new_uuid = str(uuid.uuid4())
     # Create the object and store it in the object collection
     try:
         wim_id = request.json["id"]
         if request.json["type"] == "odl-wim":
             wim = odl_wimUtils.Wim(request.json["url"])
         elif request.json["type"] == "test-wim":
             wim = test_wimUtils.Wim(request.json["url"])
         else:
             return "Error: Not supported WIM type", 400
     except KeyError:
         return f"Error: Required fields: {self.req_fields}", 400
     thebytes = pickle.dumps(wim)
     obj_json = {
         "_id": new_uuid,
         "id": request.json["id"],
         "obj": Binary(thebytes)
     }
     request.json["_id"] = new_uuid
     request.json["created_at"] = time.time()  # unix epoch
     request.json["slices"] = {}
     try:
         new_uuid = mongoUtils.add("wim", request.json)
     except pymongo.errors.DuplicateKeyError:
         return f"WIM with id {wim_id} already exists", 400
     try:
         monitoring_url = request.json["monitoring-url"]
     except KeyError:
         pass
     else:
         with open("/targets/wim_targets.json", mode="r") as prom_file:
             prom = json.load(prom_file)
             prom.append({
                 "targets": [monitoring_url],
                 "labels": {
                     "wim_id": wim_id
                 }
             })
         with open("/targets/wim_targets.json", mode="w") as prom_file:
             json.dump(prom, prom_file)
     mongoUtils.add("wim_obj", obj_json)
     return new_uuid, 201
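When a monitoring-url is supplied, the handler above appends an entry to /targets/wim_targets.json, which follows the shape of a Prometheus file-based service-discovery target list ({"targets": [...], "labels": {...}}). A short standalone sketch of that read-append-write step; the guard for a missing or empty file is an addition for illustration, since the handler assumes the file already exists.

import json
import os

def add_wim_target(path, monitoring_url, wim_id):
    """Append a file_sd-style target entry, creating the list if needed."""
    targets = []
    if os.path.isfile(path) and os.path.getsize(path) > 0:
        with open(path, mode="r") as prom_file:
            targets = json.load(prom_file)
    targets.append({"targets": [monitoring_url], "labels": {"wim_id": wim_id}})
    with open(path, mode="w") as prom_file:
        json.dump(targets, prom_file)

# add_wim_target("/targets/wim_targets.json", "10.30.0.7:9100", "wim-1")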
Example 11
 def put(self, uuid):
     """
     Update a registered platform location
     used by: `katana location update [uuid] -f [file]`
     """
     for ifield in self.req_fields:
         if not request.json.get(ifield, None):
             return f"Field {ifield} is missing", 400
     # Lowercase the location id
     request.json["id"] = request.json["id"].lower()
     data = request.json
     data["_id"] = uuid
     old_data = mongoUtils.get("location", uuid)
     if old_data:
         if old_data["vims"] or old_data["functions"]:
             return (
                 f"Location {data['_id']} is in use by another component, cannot update it",
                 400,
             )
         data["created_at"] = old_data["created_at"]
         data["vims"] = []
         data["functions"] = []
         mongoUtils.update("location", uuid, data)
         return f"Modified location {data['id']}", 200
     else:
         data["created_at"] = time.time()  # unix epoch
         data["vims"] = []
         data["functions"] = []
         new_uuid = mongoUtils.add("location", request.json)
         return new_uuid, 201
Example 12
    def put(self, uuid):
        """
        Add or update a new supported network function.
        The request must provide the service details.
        used by: `katana function update -f [yaml file]`
        """
        data = request.json
        data['_id'] = uuid
        old_data = mongoUtils.get("func", uuid)

        if old_data:
            data["created_at"] = old_data["created_at"]
            data["tenants"] = []
            if len(data["tenants"]) > 0:
                return f"Error: Func is used by slices {data['tenants']}"
            mongoUtils.update("func", uuid, data)
            return f"Modified {uuid}", 200
        else:
            new_uuid = uuid
            data = request.json
            data['_id'] = new_uuid
            data['created_at'] = time.time()  # unix epoch
            data["tenants"] = []

            for field in self.req_fields:
                try:
                    _ = data[field]
                except KeyError:
                    return f"Error: Required fields: {self.req_fields}", 400
            try:
                new_uuid = mongoUtils.add('func', data)
            except pymongo.errors.DuplicateKeyError:
                return f"Function with id {data['id']} already exists", 400
            return f"Created {new_uuid}", 201
 def post(self):
     """
     Add a new base slice descriptor. The request must provide the base
     slice descriptor details. Used by: `katana slice_des add -f [file]`
     """
     new_uuid = str(uuid.uuid4())
     data = request.json
     data['_id'] = new_uuid
     return str(mongoUtils.add('base_slice_des_id', data)), 201
Example 14
 def readVnfd(self):
     """
     Reads and returns required information from nsd/vnfd
     """
     url = f"https://{self.ip}:9999/osm/vnfpkgm/v1/vnf_packages/"
     while True:
         headers = {
             "Content-Type": "application/yaml",
             "Accept": "application/json",
             "Authorization": f"Bearer {self.token}",
         }
         response = requests.get(url, headers=headers, verify=False)
         if response.status_code != 401:
             osm_vnfd_list = response.json()
             new_vnfd = {}
             for osm_vnfd in osm_vnfd_list:
                 if all(key in osm_vnfd
                        for key in ("id", "_id", "mgmt-interface", "vdu")):
                     new_vnfd["vnfd-id"] = osm_vnfd["_id"]
                     new_vnfd["name"] = osm_vnfd["id"]
                     new_vnfd["flavor"] = {
                         "memory-mb": 0,
                         "vcpu-count": 0,
                         "storage-gb": 0
                     }
                     instances = 0
                     for vdu in osm_vnfd["vdu"]:
                         if "vm-flavor" in vdu.keys():
                             for key in new_vnfd["flavor"]:
                                 new_vnfd["flavor"][key] += int(
                                     vdu["vm-flavor"][key])
                             instances += 1
                     new_vnfd["flavor"]["instances"] = instances
                     new_vnfd["mgmt"] = osm_vnfd["mgmt-interface"]["cp"]
                     new_vnfd["nfvo_id"] = self.nfvo_id
                     new_vnfd["_id"] = str(uuid.uuid4())
                     try:
                         mongoUtils.add("vnfd", new_vnfd)
                     except pymongo.errors.DuplicateKeyError:
                         continue
                     new_vnfd = {}
             break
         else:
             self.getToken()
 def readVnfd(self):
     """
     Reads and returns required information from nsd/vnfd
     """
     url = f"https://{self.ip}:9999/osm/vnfpkgm/v1/vnf_packages/"
     while True:
         headers = {
             'Content-Type': 'application/yaml',
             'Accept': 'application/json',
             'Authorization': f'Bearer {self.token}',
         }
         response = requests.get(url, headers=headers, verify=False)
         if response.status_code != 401:
             osm_vnfd_list = response.json()
             new_vnfd = {}
             for osm_vnfd in osm_vnfd_list:
                 new_vnfd["vnfd-id"] = osm_vnfd["_id"]
                 new_vnfd["name"] = osm_vnfd["id"]
                 new_vnfd["flavor"] = {
                     "memory-mb": 0,
                     "vcpu-count": 0,
                     "storage-gb": 0
                 }
                 for vdu in osm_vnfd["vdu"]:
                     new_vnfd["flavor"]["memory-mb"] += int(
                         vdu["vm-flavor"]["memory-mb"])
                     new_vnfd["flavor"]["vcpu-count"] += int(
                         vdu["vm-flavor"]["vcpu-count"])
                     new_vnfd["flavor"]["storage-gb"] += int(
                         vdu["vm-flavor"]["storage-gb"])
                 new_vnfd["mgmt"] = osm_vnfd["mgmt-interface"]["cp"]
                 new_vnfd["nfvo_id"] = self.nfvo_id
                 new_vnfd["_id"] = str(uuid.uuid4())
                 try:
                     mongoUtils.add("vnfd", new_vnfd)
                 except pymongo.errors.DuplicateKeyError:
                     continue
                 new_vnfd = {}
             break
         else:
             self.getToken()
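Both readVnfd variants reduce each VNFD to an aggregate flavor by summing the vm-flavor fields over its VDUs (the first variant also guards against VDUs without a vm-flavor and counts instances). A small worked example of that aggregation on a made-up two-VDU descriptor, using the same OSM field names accessed above:

# Hypothetical VNFD fragment with two VDUs
osm_vnfd = {
    "vdu": [
        {"vm-flavor": {"memory-mb": "2048", "vcpu-count": "2", "storage-gb": "10"}},
        {"vm-flavor": {"memory-mb": "4096", "vcpu-count": "4", "storage-gb": "20"}},
    ]
}

flavor = {"memory-mb": 0, "vcpu-count": 0, "storage-gb": 0}
for vdu in osm_vnfd["vdu"]:
    if "vm-flavor" in vdu:
        for key in flavor:
            flavor[key] += int(vdu["vm-flavor"][key])

print(flavor)  # {'memory-mb': 6144, 'vcpu-count': 6, 'storage-gb': 30}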
Example 16
    def put(self, uuid):
        """
        Add or update a new base slice descriptor.
        The request must provide the service details.
        used by: `katana slice_des update -f [file]`
        """
        data = request.json
        data["_id"] = uuid
        old_data = mongoUtils.get("base_slice_des_ref", uuid)

        if old_data:
            mongoUtils.update("base_slice_des_ref", uuid, data)
            return f"Modified {uuid}", 200
        else:
            new_uuid = uuid
            data = request.json
            data["_id"] = new_uuid
            return str(mongoUtils.add("base_slice_des_ref", data)), 201
Example 17
    def post(self):
        """
        Add a new supported function.
        The request must provide the network function details.
        used by: `katana func add -f [yaml file]`
        """
        new_uuid = str(uuid.uuid4())
        data = request.json
        data['_id'] = new_uuid
        data['created_at'] = time.time()  # unix epoch
        data["tenants"] = []

        for field in self.req_fields:
            try:
                _ = data[field]
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
        try:
            new_uuid = mongoUtils.add('func', data)
        except pymongo.errors.DuplicateKeyError:
            return f"Network Function with id {data['id']} already exists", 400
        return f"Created {new_uuid}", 201
Example 18
 def post(self):
     """
     Register a new platform location
     used by: `katana location add -f [file]`
     """
     # Generate a new uuid
     new_uuid = str(uuid.uuid4())
     request.json["_id"] = new_uuid
     request.json["created_at"] = time.time()  # unix epoch
     request.json["vims"] = []
     request.json["functions"] = []
     for ifield in self.req_fields:
         if not request.json.get(ifield, None):
             return f"Field {ifield} is missing", 400
     # Lowercase the location id
     request.json["id"] = request.json["id"].lower()
     try:
         new_uuid = mongoUtils.add("location", request.json)
     except pymongo.errors.DuplicateKeyError:
         return (f"Location {request.json['id']} is already registered", 400)
     return new_uuid, 201
def nest_mapping(req):
    """
    Function that maps nest to the underlying network functions
    """
    # Store the gst in DB
    mongoUtils.add("gst", req)

    nest = {"_id": req["_id"]}

    # Recreate the nest req
    for field in NEST_FIELDS:
        req[field] = req.get(field, None)

    # ****** STEP 1: Slice Descriptor ******
    if not req["base_slice_descriptor"]:
        logger.error("No Base Slice Descriptor given - Exit")
        return "NEST Error: No Base Slice Descriptor given", 400
    req_slice_des = req["base_slice_descriptor"]
    # *** Recreate the NEST ***
    for req_key in SLICE_DES_OBJ:
        req_slice_des[req_key] = req_slice_des.get(req_key, None)
    for req_key in SLICE_DES_LIST:
        req_slice_des[req_key] = req_slice_des.get(req_key, [])

    # *** Check if there are references for slice ***
    if req_slice_des["base_slice_des_ref"]:
        ref_slice = mongoUtils.find(
            "base_slice_des_ref", {"base_slice_des_id": req_slice_des["base_slice_des_ref"]}
        )
        if ref_slice:
            for key, value in req_slice_des.items():
                try:
                    if value is None:
                        req_slice_des[key] = ref_slice[key]
                except KeyError:
                    continue
        else:
            logger.error(
                "slice_descriptor {} not found".format(req_slice_des["base_slice_des_ref"])
            )
            return "Error: referenced slice_descriptor not found", 400

    # *************************** Start the mapping ***************************
    # Currently supports:
    # 1) If delay_tolerance --> EMBB else --> URLLC
    #    If EMBB --> EPC Placement=@Core. If URLLC --> EPC Placement=@Edge
    # 2) If network throughput > 100 Mbps --> Type=5G
    # *************************************************************************
    functions_list = []

    if req_slice_des["network_DL_throughput"]["guaranteed"] > 100000:
        gen = 5
    else:
        gen = 4

    # *** Calculate the type of the slice (sst) ***
    if req_slice_des["delay_tolerance"]:
        # EMBB
        nest["sst"] = 1
        epc = mongoUtils.find("func", calc_find_data(gen, "Core", 0))
        if not epc:
            return "Error: Not available Core Network Functions", 400
        connections = []
        not_supp_loc = []
        for location in req_slice_des["coverage"]:
            enb = mongoUtils.find("func", calc_find_data(gen, location, 1))
            if not enb:
                not_supp_loc.append(location)
            else:
                connections.append({"core": epc, "radio": enb})
                enb["tenants"].append(nest["_id"])
                mongoUtils.update("func", enb["_id"], enb)
                functions_list.append(enb["_id"])
        if not epc or not connections:
            return "Error: Not available Network Functions", 400
        epc["tenants"].append(nest["_id"])
        mongoUtils.update("func", epc["_id"], epc)
        functions_list.append(epc["_id"])
        for location in not_supp_loc:
            logger.warning(f"Location {location} not supported")
            req_slice_des["coverage"].remove(location)
    else:
        # URLLC
        nest["sst"] = 2
        connections = []
        not_supp_loc = []
        for location in req_slice_des["coverage"]:
            epc = mongoUtils.find("func", calc_find_data(gen, location, 0))
            enb = mongoUtils.find("func", calc_find_data(gen, location, 1))
            if not epc or not enb:
                not_supp_loc.append(location)
            else:
                connections.append({"core": epc, "radio": enb})
                epc["tenants"].append(nest["_id"])
                enb["tenants"].append(nest["_id"])
                mongoUtils.update("func", enb["_id"], enb)
                mongoUtils.update("func", epc["_id"], epc)
                functions_list.extend([epc["_id"], enb["_id"]])
        if not connections:
            return "Error: Not available Network Functions", 400
        for location in not_supp_loc:
            logger.warning(f"Location {location} not supported")
            req_slice_des["coverage"].remove(location)

    nest["connections"] = connections
    nest["functions"] = functions_list

    # Values to be copied to NEST
    KEYS_TO_BE_COPIED = (
        "network_DL_throughput",
        "ue_DL_throughput",
        "network_UL_throughput",
        "ue_UL_throughput",
        "group_communication_support",
        "mtu",
        "number_of_terminals",
        "positional_support",
        "radio_spectrum",
        "device_velocity",
        "terminal_density",
        "coverage",
    )
    for key in KEYS_TO_BE_COPIED:
        nest[key] = req_slice_des[key]

    # Create the shared value
    nest["shared"] = {
        "isolation": req_slice_des["isolation_level"],
        "simultaneous_nsi": req_slice_des["simultaneous_nsi"],
    }

    # ****** STEP 2: Service Descriptor ******
    if req["service_descriptor"]:
        req_service_des = req["service_descriptor"]
        # *** Recreate the NEST ***
        for req_key in SERVICE_DES_OBJ:
            req_service_des[req_key] = req_service_des.get(req_key, None)
        for req_key in SERVICE_DES_LIST:
            req_service_des[req_key] = req_service_des.get(req_key, [])
        # Create the NS field on Nest
        nest["ns_list"] = req_service_des["ns_list"]

        # # Replace Placement with location in each NS
        # for ns in nest["ns_list"]:
        #     ns["placement"] = (
        #         lambda x: {"location": ["Core"]} if not x else
        #         {"location": req_slice_des["coverage"]})(ns["placement"])

    # ****** STEP 3: Service Descriptor ******
    if req["test_descriptor"]:
        req_test_des = req["test_descriptor"]
        # *** Recreate the NEST ***
        for req_key in TEST_DES_OBJ:
            req_test_des[req_key] = req_test_des.get(req_key, None)
        for req_key in TEST_DES_LIST:
            req_test_des[req_key] = req_test_des.get(req_key, [])
        # Create the Probe field on Nest
        nest["probe_list"] = req_test_des["probe_list"]

    if not mongoUtils.find(
        "base_slice_des_ref",
        {"base_slice_des_id": req["base_slice_descriptor"]["base_slice_des_id"]},
    ):
        new_uuid = str(uuid.uuid4())
        req["base_slice_descriptor"]["_id"] = new_uuid
        mongoUtils.add("base_slice_des_ref", req["base_slice_descriptor"])
    return nest, 0
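To make the mapping rules above concrete, here is a tiny standalone sketch of the two decisions from the comment block: generation 5 when the guaranteed downlink throughput exceeds 100000 (read as kbps, matching the "100 Mbps" comment), and sst 1 (EMBB) when the service is delay tolerant, otherwise sst 2 (URLLC).

def classify(slice_des):
    """Return (generation, sst) for a minimal base slice descriptor."""
    gen = 5 if slice_des["network_DL_throughput"]["guaranteed"] > 100000 else 4
    sst = 1 if slice_des["delay_tolerance"] else 2  # 1: EMBB, 2: URLLC
    return gen, sst

# Delay-tolerant slice with 150 Mbps guaranteed DL -> (5, 1)
print(classify({"network_DL_throughput": {"guaranteed": 150000}, "delay_tolerance": True}))
# Latency-critical slice with 50 Mbps guaranteed DL -> (4, 2)
print(classify({"network_DL_throughput": {"guaranteed": 50000}, "delay_tolerance": False}))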
Example 20
    def post(self):
        """
        Add a new vim. The request must provide the vim details.
        used by: `katana vim add -f [file]`
        """
        new_uuid = str(uuid.uuid4())
        request.json["_id"] = new_uuid
        request.json["created_at"] = time.time()  # unix epoch
        request.json["tenants"] = {}

        # Check the required fields
        try:
            username = request.json["username"]
            password = request.json["password"]
            auth_url = request.json["auth_url"]
            project_name = request.json["admin_project_name"]
            location_id = request.json["location"].lower()
            request.json["location"] = location_id
            vim_id = request.json["id"]
        except KeyError:
            return f"Error: Required fields: {self.req_fields}", 400

        # Check that the VIM location is registered
        location = mongoUtils.find("location", {"id": location_id})
        if not location:
            return f"Location {location_id} is not registered. Please add the location first", 400
        location["vims"].append(vim_id)
        # Type of OpenStack
        if request.json["type"] == "openstack":
            try:
                new_vim = openstackUtils.Openstack(
                    uuid=new_uuid,
                    auth_url=auth_url,
                    project_name=project_name,
                    username=username,
                    password=password,
                )
                if new_vim.auth_error:
                    raise AttributeError
            except AttributeError:
                return "Error: VIM Error", 400
            else:
                request.json["resources"] = new_vim.get_resources()
                thebytes = pickle.dumps(new_vim)
                obj_json = {"_id": new_uuid, "id": request.json["id"], "obj": Binary(thebytes)}
                try:
                    vim_monitoring = request.json["infrastructure_monitoring"]
                except KeyError:
                    pass
                else:
                    with open("/targets/vim_targets.json", mode="r") as prom_file:
                        prom = json.load(prom_file)
                        prom.append({"targets": [vim_monitoring], "labels": {}})
                    with open("/targets/vim_targets.json", mode="w") as prom_file:
                        json.dump(prom, prom_file)
        # Type of OpenNebula
        elif request.json["type"] == "opennebula":
            try:
                new_vim = opennebulaUtils.Opennebula(
                    uuid=new_uuid,
                    auth_url=auth_url,
                    project_name=project_name,
                    username=username,
                    password=password,
                )
            except AttributeError:
                return "Error: VIM Error", 400
            else:
                request.json["resources"] = {"N/A": "N/A"}
                thebytes = pickle.dumps(new_vim)
                obj_json = {"_id": new_uuid, "id": request.json["id"], "obj": Binary(thebytes)}
        else:
            response = dumps({"error": "This type VIM is not supported"})
            return response, 400
        try:
            new_uuid = mongoUtils.add("vim", request.json)
        except pymongo.errors.DuplicateKeyError:
            return f"VIM with id {vim_id} already exists", 400
        mongoUtils.add("vim_obj", obj_json)
        if location:
            mongoUtils.update("location", location["_id"], location)
        return new_uuid, 201
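This handler rejects a VIM whose location has not been registered yet, so the registration order matters: add the location first, then the VIM that references it. A hedged client-side sketch of that two-step flow; the base URL and endpoint paths are assumptions, the "description" field is purely illustrative, and any extra required fields depend on self.req_fields.

import requests

BASE = "http://localhost:8000/api"  # assumed API base URL

# 1) Register the platform location (the handler lowercases the id)
requests.post(f"{BASE}/location", json={"id": "edge", "description": "Edge site"})

# 2) Register an OpenStack VIM that lives in that location
vim_payload = {
    "id": "openstack-edge",
    "type": "openstack",
    "auth_url": "http://openstack.example:5000/v3",
    "username": "admin",
    "password": "******",
    "admin_project_name": "admin",
    "location": "edge",
}
requests.post(f"{BASE}/vim", json=vim_payload)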
Example 21
    def put(self, uuid):
        """
        Update the details of a specific vim.
        used by: `katana vim update -f [yaml file] [uuid]`
        """
        data = request.json
        new_uuid = uuid
        data["_id"] = uuid
        old_data = mongoUtils.get("vim", uuid)

        if old_data:
            data["created_at"] = old_data["created_at"]
            data["tenants"] = old_data["tenants"]
            try:
                for entry in self.req_fields:
                    if data[entry] != old_data[entry]:
                        return "Cannot update field: " + entry, 400
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            else:
                mongoUtils.update("vim", uuid, data)
            return f"Modified {uuid}", 200
        else:
            request.json["_id"] = new_uuid
            request.json["created_at"] = time.time()  # unix epoch
            request.json["tenants"] = {}

            try:
                username = request.json["username"]
                password = request.json["password"]
                auth_url = request.json["auth_url"]
                project_name = request.json["admin_project_name"]
                vim_id = request.json["id"]
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            if request.json["type"] == "openstack":
                try:
                    new_vim = openstackUtils.Openstack(
                        uuid=new_uuid,
                        auth_url=auth_url,
                        project_name=project_name,
                        username=username,
                        password=password,
                    )
                    if new_vim.auth_error:
                        raise AttributeError
                except AttributeError as e:
                    response = dumps({"error": "Openstack auth failed. " + str(e)})
                    return response, 400
                else:
                    request.json["resources"] = new_vim.get_resources()
                    thebytes = pickle.dumps(new_vim)
                    obj_json = {
                        "_id": new_uuid,
                        "id": request.json["id"],
                        "obj": Binary(thebytes)
                    }
            elif request.json["type"] == "opennebula":
                try:
                    new_vim = opennebulaUtils.Opennebula(
                        uuid=new_uuid,
                        auth_url=auth_url,
                        project_name=project_name,
                        username=username,
                        password=password,
                    )
                except AttributeError as e:
                    response = dumps({"Error": "OpenNebula auth failed." + e})
                    return response, 400
                else:
                    request.json["resources"] = {"N/A": "N/A"}
                    thebytes = pickle.dumps(new_vim)
                    obj_json = {
                        "_id": new_uuid,
                        "id": request.json["id"],
                        "obj": Binary(thebytes)
                    }
            else:
                response = dumps({"error": "This type VIM is not supported"})
                return response, 400
            try:
                new_uuid = mongoUtils.add("vim", request.json)
            except pymongo.errors.DuplicateKeyError:
                return f"VIM with id {vim_id} already exists", 400
            mongoUtils.add("vim_obj", obj_json)
            return f"Created {new_uuid}", 201
Example 22
    def put(self, uuid):
        """
        Update the details of a specific nfvo.
        used by: `katana nfvo update -f [yaml file] [uuid]`
        """
        data = request.json
        data["_id"] = uuid
        old_data = mongoUtils.get("nfvo", uuid)

        if old_data:
            data["created_at"] = old_data["created_at"]
            data["tenants"] = old_data["tenants"]
            try:
                for entry in self.req_fields:
                    if data[entry] != old_data[entry]:
                        return "Cannot update field: " + entry, 400
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            else:
                mongoUtils.update("nfvo", uuid, data)
            return f"Modified {uuid}", 200
        else:
            new_uuid = uuid
            data = request.json
            data["_id"] = new_uuid
            data["created_at"] = time.time()  # unix epoch
            data["tenants"] = {}

            if request.json["type"] == "OSM":
                # Create the NFVO object
                try:
                    osm_username = request.json["nfvousername"]
                    osm_password = request.json["nfvopassword"]
                    osm_ip = request.json["nfvoip"]
                    osm_project_name = request.json["tenantname"]
                    nfvo_id = request.json["id"]
                except KeyError:
                    return f"Error: Required fields: {self.req_fields}", 400
                else:
                    osm = osmUtils.Osm(nfvo_id, osm_ip, osm_username,
                                       osm_password, osm_project_name)
                try:
                    osm.getToken()
                except ConnectTimeout as e:
                    logger.exception("Connection Timeout: {}".format(e))
                    response = dumps({"error": "Unable to connect to NFVO"})
                    return (response, 400)
                except ConnectionError as e:
                    logger.exception("Connection Error: {}".format(e))
                    response = dumps({"error": "Unable to connect to NFVO"})
                    return (response, 400)
                else:
                    # Store the osm object to the mongo db
                    thebytes = pickle.dumps(osm)
                    obj_json = {
                        "_id": new_uuid,
                        "id": data["id"],
                        "obj": Binary(thebytes)
                    }
                    try:
                        new_uuid = mongoUtils.add("nfvo", data)
                    except pymongo.errors.DuplicateKeyError:
                        return f"NFVO with id {nfvo_id} already exists", 400
                    mongoUtils.add("nfvo_obj", obj_json)
                    # Get information regarding VNFDs and NSDs
                    osmUtils.bootstrapNfvo(osm)
            else:
                response = dumps({"error": "This type nfvo is not supported"})
                return response, 400
            return f"Created {new_uuid}", 201
Example 23
    def post(self):
        """
        Add a new vim. The request must provide the vim details.
        used by: `katana vim add -f [yaml file]`
        """
        new_uuid = str(uuid.uuid4())
        request.json["_id"] = new_uuid
        request.json["created_at"] = time.time()  # unix epoch
        request.json["tenants"] = {}

        try:
            username = request.json["username"]
            password = request.json["password"]
            auth_url = request.json["auth_url"]
            project_name = request.json["admin_project_name"]
            vim_id = request.json["id"]
        except KeyError:
            return f"Error: Required fields: {self.req_fields}", 400
        if request.json["type"] == "openstack":
            try:
                new_vim = openstackUtils.Openstack(
                    uuid=new_uuid,
                    auth_url=auth_url,
                    project_name=project_name,
                    username=username,
                    password=password,
                )
                if new_vim.auth_error:
                    raise AttributeError
            except AttributeError as e:
                response = dumps({"error": "Openstack auth failed. " + str(e)})
                return response, 400
            else:
                request.json["resources"] = new_vim.get_resources()
                thebytes = pickle.dumps(new_vim)
                obj_json = {
                    "_id": new_uuid,
                    "id": request.json["id"],
                    "obj": Binary(thebytes)
                }
        elif request.json["type"] == "opennebula":
            try:
                new_vim = opennebulaUtils.Opennebula(
                    uuid=new_uuid,
                    auth_url=auth_url,
                    project_name=project_name,
                    username=username,
                    password=password,
                )
            except AttributeError as e:
                response = dumps({"Error": "OpenNebula auth failed." + e})
                return response, 400
            else:
                request.json["resources"] = {"N/A": "N/A"}
                thebytes = pickle.dumps(new_vim)
                obj_json = {
                    "_id": new_uuid,
                    "id": request.json["id"],
                    "obj": Binary(thebytes)
                }
        else:
            response = dumps({"error": "This type VIM is not supported"})
            return response, 400
        try:
            new_uuid = mongoUtils.add("vim", request.json)
        except pymongo.errors.DuplicateKeyError:
            return f"VIM with id {vim_id} already exists", 400
        mongoUtils.add("vim_obj", obj_json)
        return f"Created {new_uuid}", 201
Example 24
def add_slice(nest_req):
    """
    Creates the network slice
    """

    # Recreate the NEST with None options where missing
    nest = {
        "_id": nest_req["_id"],
        "status": "Init",
        "created_at": time.time(),  # unix epoch
        "deployment_time": {
            "Placement_Time": None,
            "Provisioning_Time": None,
            "WAN_Deployment_Time": None,
            "NS_Deployment_Time": None,
            "Radio_Configuration_Time": None,
            "Slice_Deployment_Time": None,
        },
    }
    mongoUtils.add("slice", nest)
    for nest_key in NEST_KEYS_OBJ:
        nest[nest_key] = nest_req.get(nest_key, None)
    for nest_key in NEST_KEYS_LIST:
        nest[nest_key] = nest_req.get(nest_key, [])

    # Check if slice monitoring has been enabled
    monitoring = os.getenv("KATANA_MONITORING", None)
    wim_monitoring = {}
    mon_producer = None
    if monitoring:
        # Create the Kafka producer
        mon_producer = create_producer()
        nest["slice_monitoring"] = {}

    # **** STEP-1: Placement ****
    nest["status"] = "Placement"
    if monitoring:
        mon_producer.send(
            "nfv_mon",
            value={
                "action": "katana_mon",
                "slice_info": {
                    "slice_id": nest["_id"],
                    "status": "placement"
                },
            },
        )

    nest["conf_comp"] = {"nf": [], "ems": []}
    mongoUtils.update("slice", nest["_id"], nest)
    logger.info(f"{nest['_id']} Status: Placement")
    placement_start_time = time.time()

    # Initiate the lists
    vim_dict = {}
    total_ns_list = []
    ems_messages = {}

    # Get Details for the Network Services
    # i) The NS part of the core slice
    inst_functions = {}
    for connection in nest["connections"]:
        for key in connection:
            # Check if the function has been instantiated from another connection
            if connection[key]["_id"] in inst_functions:
                connection[key] = inst_functions[connection[key]["_id"]]
                continue
            # Check if the function is shared with another slice
            # shared_check values: 0: No shared, 1: First shared, 2: Shared
            shared_check = 0
            shared_slice_list_key = None
            try:
                shared_slice_list_key = nest["shared"][key][connection[key]
                                                            ["_id"]]
                shared_slice_list = connection[key]["shared"]["sharing_list"][
                    shared_slice_list_key]
                if len(shared_slice_list) > 1:
                    shared_check = 2
                else:
                    shared_check = 1
            except KeyError:
                pass
            try:
                err, pop_list = ns_details(
                    connection[key]["ns_list"],
                    connection[key]["location"],
                    vim_dict,
                    total_ns_list,
                    shared_check,
                    shared_slice_list_key,
                )
                if pop_list:
                    connection[key]["ns_list"] = [
                        x for x in connection[key]["ns_list"]
                        if x not in pop_list
                    ]
                if err:
                    nest["status"] = f"Failed - {err}"
                    nest["ns_inst_info"] = {}
                    nest["total_ns_list"] = []
                    mongoUtils.update("slice", nest["_id"], nest)
                    return
                inst_functions[connection[key]["_id"]] = connection[key]
            except KeyError:
                continue

    # ii) The extra NS of the slice
    for location in nest["coverage"]:
        err, _ = ns_details(nest["ns_list"], location, vim_dict, total_ns_list)
        if err:
            nest["status"] = f"Failed - {err}"
            nest["ns_inst_info"] = {}
            nest["total_ns_list"] = []
            mongoUtils.update("slice", nest["_id"], nest)
            return

    nest["vim_list"] = vim_dict
    nest["total_ns_list"] = total_ns_list
    nest["deployment_time"]["Placement_Time"] = format(
        time.time() - placement_start_time, ".4f")

    # **** STEP-2: Resource Provisioning ****
    nest["status"] = "Provisioning"
    if monitoring:
        mon_producer.send(
            "nfv_mon",
            value={
                "action": "katana_mon",
                "slice_info": {
                    "slice_id": nest["_id"],
                    "status": "provisioning"
                },
            },
        )
    mongoUtils.update("slice", nest["_id"], nest)
    logger.info(f"{nest['_id']} Status: Provisioning")
    prov_start_time = time.time()

    # *** STEP-2a: Cloud ***
    # *** STEP-2a-i: Create the new tenant/project on the VIM ***
    for num, (vim, vim_info) in enumerate(vim_dict.items()):
        if vim_info["shared"]:
            vim_id = vim[:-2]
        else:
            vim_id = vim
        target_vim = mongoUtils.find("vim", {"id": vim_id})
        target_vim_obj = pickle.loads(
            mongoUtils.find("vim_obj", {"id": vim_id})["obj"])

        # Define project parameters
        if vim_info["shared"] == 1:
            name = "vim_{0}_katana_{1}_shared".format(
                num, vim_info["shared_slice_list_key"])
            tenant_name = vim_info["shared_slice_list_key"]
        elif vim_info["shared"] == 0:
            name = "vim_{0}_katana_{1}".format(num, nest["_id"])
            tenant_name = nest["_id"]
        else:
            # Find the shared list
            sharing_lists = mongoUtils.get("sharing_lists",
                                           vim_info["shared_slice_list_key"])
            vim_dict[vim] = sharing_lists["vims"][target_vim["id"]]
            mongoUtils.update("slice", nest["_id"], nest)
            continue
        tenant_project_name = name
        tenant_project_description = name
        tenant_project_user = name
        tenant_project_password = "******"
        # If the vim is Openstack type, set quotas
        quotas = (vim_info["resources"] if target_vim["type"] == "openstack"
                  or target_vim["type"] == "Openstack" else None)
        ids = target_vim_obj.create_slice_prerequisites(
            tenant_project_name,
            tenant_project_description,
            tenant_project_user,
            tenant_project_password,
            nest["_id"],
            quotas=quotas,
        )
        # Register the tenant to the mongo db
        target_vim["tenants"][tenant_name] = name
        mongoUtils.update("vim", target_vim["_id"], target_vim)

        # STEP-2a-ii: Add the new VIM tenant to NFVO
        if target_vim["type"] == "openstack":
            # Update the config parameter for the tenant
            config_param = dict(security_groups=ids["secGroupName"])
        elif target_vim["type"] == "opennebula":
            config_param = target_vim["config"]
        else:
            config_param = {}

        for nfvo_id in vim_info["nfvo_list"]:
            target_nfvo = mongoUtils.find("nfvo", {"id": nfvo_id})
            target_nfvo_obj = pickle.loads(
                mongoUtils.find("nfvo_obj", {"id": nfvo_id})["obj"])
            vim_id = target_nfvo_obj.addVim(
                tenant_project_name,
                target_vim["password"],
                target_vim["type"],
                target_vim["auth_url"],
                target_vim["username"],
                config_param,
            )
            vim_info["nfvo_vim_account"] = vim_info.get("nfvo_vim_account", {})
            vim_info["nfvo_vim_account"][nfvo_id] = vim_id
            # Register the tenant to the mongo db
            target_nfvo["tenants"][tenant_name] = target_nfvo["tenants"].get(
                nest["_id"], [])
            target_nfvo["tenants"][tenant_name].append(vim_id)
            mongoUtils.update("nfvo", target_nfvo["_id"], target_nfvo)

        if vim_info["shared"] == 1:
            sharing_lists = mongoUtils.get("sharing_lists",
                                           vim_info["shared_slice_list_key"])
            sharing_lists["vims"] = sharing_lists.get("vims", {})
            sharing_lists["vims"][target_vim["id"]] = vim_info
            mongoUtils.update("sharing_lists",
                              vim_info["shared_slice_list_key"], sharing_lists)

    mongoUtils.update("slice", nest["_id"], nest)
    # *** STEP-2b: WAN ***
    if mongoUtils.count("wim") <= 0:
        logger.warning("There is no registered WIM")
    else:
        wan_start_time = time.time()
        # Create the data for the WIM
        wim_data = {
            "_id": nest["_id"],
            "core_connections": [],
            "extra_ns": [],
            "slice_sla": {}
        }
        # i) Create the slice_sla data for the WIM
        wim_data["slice_sla"] = {
            "network_DL_throughput": nest["network_DL_throughput"],
            "network_UL_throughput": nest["network_UL_throughput"],
            "mtu": nest["mtu"],
        }
        # ii) Add the connections
        for connection in nest["connections"]:
            data = {}
            for key in connection:
                key_data = {}
                try:
                    ns_l = connection[key]["ns_list"]
                except KeyError:
                    pass
                else:
                    key_data["ns"] = []
                    for ns in ns_l:
                        if ns["placement_loc"] not in key_data["ns"]:
                            key_data["ns"].append(ns["placement_loc"])
                try:
                    pnf_l = connection[key]["pnf_list"]
                except KeyError:
                    pass
                else:
                    key_data["pnf"] = pnf_l
                if key_data:
                    data[key] = key_data
            if data:
                wim_data["core_connections"].append(data)
        # iii) Add the extra Network Services
        for ns in nest["ns_list"]:
            if ns["placement_loc"] not in wim_data["extra_ns"]:
                wim_data["extra_ns"].append(ns["placement_loc"])
        # iv) Add the probes
        wim_data["probes"] = nest["probe_list"]
        # Select WIM - Assume that there is only one registered
        wim_list = list(mongoUtils.index("wim"))
        target_wim = wim_list[0]
        target_wim_id = target_wim["id"]
        target_wim_obj = pickle.loads(
            mongoUtils.find("wim_obj", {"id": target_wim_id})["obj"])
        target_wim_obj.create_slice(wim_data)
        nest["wim_data"] = wim_data
        target_wim["slices"][nest["_id"]] = nest["_id"]
        mongoUtils.update("slice", nest["_id"], nest)
        mongoUtils.update("wim", target_wim["_id"], target_wim)
        # Add the WIM monitoring URL to the NEST, if the WIM provides one
        try:
            wim_monitoring = target_wim["monitoring-url"]
            nest["slice_monitoring"]["WIM"] = wim_monitoring
        except KeyError:
            pass
        nest["deployment_time"]["WAN_Deployment_Time"] = format(
            time.time() - wan_start_time, ".4f")
    nest["deployment_time"]["Provisioning_Time"] = format(
        time.time() - prov_start_time, ".4f")

    # **** STEP-3: Slice Activation Phase****
    nest["status"] = "Activation"
    if monitoring:
        mon_producer.send(
            "nfv_mon",
            value={
                "action": "katana_mon",
                "slice_info": {
                    "slice_id": nest["_id"],
                    "status": "activation"
                },
            },
        )
    mongoUtils.update("slice", nest["_id"], nest)
    logger.info(f"{nest['_id']} Status: Activation")
    # *** STEP-3a: Cloud ***
    # Instantiate NS
    # Store info about instantiated NSs
    ns_inst_info = {}
    nest["deployment_time"]["NS_Deployment_Time"] = {}
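    # shared_function: 2 -> the NS has already been instantiated by another slice of the
    # sharing list, so its instantiation info is simply reused; 1 -> this is the first
    # slice of the sharing list, so the NS is instantiated and published to the list;
    # otherwise the NS is instantiated for this slice only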
    for ns in total_ns_list:
        ns["start_time"] = time.time()
        if ns["shared_function"] == 2:
            # The ns is already instantiated and there is no need to instantiate again
            # Find the sharing list
            shared_list = mongoUtils.get("sharing_lists",
                                         ns["shared_slice_key"])
            ns_inst_info[ns["ns-id"]] = shared_list["ns_list"][ns["nsd-id"]]
            nest["conf_comp"]["nf"].append(ns["nsd-id"])
            continue
        ns_inst_info[ns["ns-id"]] = {}
        target_nfvo = mongoUtils.find("nfvo", {"id": ns["nfvo-id"]})
        target_nfvo_obj = pickle.loads(
            mongoUtils.find("nfvo_obj", {"id": ns["nfvo-id"]})["obj"])
        selected_vim = ns["placement_loc"]["vim"]
        nfvo_vim_account = vim_dict[selected_vim]["nfvo_vim_account"][
            ns["nfvo-id"]]
        nfvo_inst_ns = target_nfvo_obj.instantiateNs(ns["ns-name"],
                                                     ns["nsd-id"],
                                                     nfvo_vim_account)
        ns_inst_info[ns["ns-id"]][ns["placement_loc"]["location"]] = {
            "nfvo_inst_ns": nfvo_inst_ns,
            "nfvo-id": ns["nfvo-id"],
            "ns-name": ns["ns-name"],
            "slice_id": nest["_id"],
            "vim": selected_vim,
        }
        # Check if this is the first slice of a sharing list
        if ns["shared_function"] == 1:
            shared_list = mongoUtils.get("sharing_lists",
                                         ns["shared_slice_key"])
            ns_inst_info[ns["ns-id"]][ns["placement_loc"]
                                      ["location"]]["shared"] = True
            ns_inst_info[ns["ns-id"]][ns["placement_loc"]["location"]][
                "sharing_list"] = ns["shared_slice_key"]
            shared_list["ns_list"][ns["nsd-id"]] = ns_inst_info[ns["ns-id"]]
            mongoUtils.update("sharing_lists", ns["shared_slice_key"],
                              shared_list)
        nest["conf_comp"]["nf"].append(ns["nsd-id"])
        time.sleep(6)  # pause between consecutive NS instantiation requests

    # Get the nsr for each service and wait for the activation
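    # Poll each NSR every 10 seconds until it reports both "running" and "configured";
    # if an NSR reports "failed", mark the slice as failed and abort the deployment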
    for ns in total_ns_list:
        target_nfvo = mongoUtils.find("nfvo", {"id": ns["nfvo-id"]})
        target_nfvo_obj = pickle.loads(
            mongoUtils.find("nfvo_obj", {"id": ns["nfvo-id"]})["obj"])
        site = ns["placement_loc"]
        nfvo_inst_ns_id = ns_inst_info[ns["ns-id"]][
            site["location"]]["nfvo_inst_ns"]
        insr = target_nfvo_obj.getNsr(nfvo_inst_ns_id)
        while insr["operational-status"] != "running" or insr[
                "config-status"] != "configured":
            if insr["operational-status"] == "failed":
                error_message = (
                    f"Network Service {ns['nsd-id']} failed to start on NFVO {ns['nfvo-id']}."
                )
                logger.error(error_message)
                nest["ns_inst_info"] = ns_inst_info
                nest["status"] = f"Failed - {error_message}"
                mongoUtils.update("slice", nest["_id"], nest)
                return
            time.sleep(10)
            insr = target_nfvo_obj.getNsr(nfvo_inst_ns_id)
        nest["deployment_time"]["NS_Deployment_Time"][ns["ns-name"]] = format(
            time.time() - ns["start_time"], ".4f")
        # Get the IPs of the instantiated NS
        vnf_list = []
        vnfr_id_list = target_nfvo_obj.getVnfrId(insr)
        for ivnfr_id in vnfr_id_list:
            vnfr = target_nfvo_obj.getVnfr(ivnfr_id)
            vnf_list.append(target_nfvo_obj.getIPs(vnfr))
        ns_inst_info[ns["ns-id"]][site["location"]]["vnfr"] = vnf_list

    nest["ns_inst_info"] = ns_inst_info
    mongoUtils.update("slice", nest["_id"], nest)

    # If monitoring parameter is set, send the ns_list to nfv_mon module
    if monitoring and mon_producer:
        mon_producer.send(topic="nfv_mon",
                          value={
                              "action": "create",
                              "ns_list": ns_inst_info
                          })
        nest["slice_monitoring"]["nfv_ns_status_monitoring"] = True

    # *** STEP-3b: Radio Slice Configuration ***
    if mongoUtils.count("ems") <= 0:
        logger.warning("There is no registered EMS")
    else:
        # Build the per-EMS radio configuration messages (NS management IPs, PNFs and
        # radio SLA) and collect them in ems_messages
        ems_radio_data = {
            "ue_DL_throughput": nest["ue_DL_throughput"],
            "ue_UL_throughput": nest["ue_UL_throughput"],
            "group_communication_support": nest["group_communication_support"],
            "number_of_terminals": nest["number_of_terminals"],
            "positional_support": nest["positional_support"],
            "radio_spectrum": nest["radio_spectrum"],
            "device_velocity": nest["device_velocity"],
            "terminal_density": nest["terminal_density"],
        }
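        # The messages below are built per connection and grouped by the EMS (ems-id)
        # responsible for each core/radio function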
        radio_start_time = time.time()
        for connection in nest["connections"]:
            data = {}
            ems_id_list = []
            for key in connection:
                # Check if the connection is shared
                try:
                    shared_slice_list_key = nest["shared"][key][connection[key]
                                                                ["_id"]]
                    shared_slice_list = connection[key]["shared"][
                        "sharing_list"][shared_slice_list_key]
                    shared = True
                    if len(shared_slice_list) > 1:
                        shared_check = 2
                    else:
                        shared_check = 1
                except KeyError:
                    shared_slice_list_key = None
                    shared = False
                key_data = {}
                try:
                    ems_id = connection[key]["ems-id"]
                except KeyError:
                    continue
                else:
                    if ems_id not in ems_id_list:
                        ems_id_list.append(ems_id)
                    try:
                        ns_l = connection[key]["ns_list"]
                    except KeyError:
                        pass
                    else:
                        key_data["ns"] = []
                        for ns in ns_l:
                            try:
                                ns_info = ns_inst_info[ns["ns-id"]][
                                    connection[key]["location"]]
                            except KeyError:
                                ns_info = ns_inst_info[ns["ns-id"]]["Core"]
                            ns_data = {
                                "name": ns["ns-name"],
                                "location": ns["placement_loc"]["location"],
                                "vnf_list": ns_info["vnfr"],
                            }
                            # Add the shared information for the ns, if any
                            if shared:
                                ns_data["shared"] = ns_inst_info[ns["ns-id"]][
                                    connection[key]["location"]]["shared"]
                                ns_data["sharing_list"] = ns_inst_info[
                                    ns["ns-id"]][connection[key]
                                                 ["location"]]["sharing_list"]
                            else:
                                ns_data["shared"] = False
                            key_data["ns"].append(ns_data)
                try:
                    key_data["pnf"] = connection[key]["pnf_list"]
                except KeyError:
                    pass
                else:
                    if shared:
                        for ipnf in connection[key]["pnf_list"]:
                            ipnf["shared"] = True
                            ipnf["sharing_list"] = shared_slice_list_key
                if key_data:
                    data[key] = key_data
            if data:
                data["slice_sla"] = ems_radio_data
                data["slice_id"] = nest["_id"]
                for ems_id in ems_id_list:
                    messages = ems_messages.get(ems_id, [])
                    messages.append(data)
                    ems_messages[ems_id] = messages

        for ems_id, ems_message in ems_messages.items():
            # Find the EMS
            target_ems = mongoUtils.find("ems", {"id": ems_id})
            if not target_ems:
                # Error handling: There is no such EMS
                logger.error(
                    "EMS {} not found - No configuration".format(ems_id))
                continue
            target_ems_obj = pickle.loads(
                mongoUtils.find("ems_obj", {"id": ems_id})["obj"])
            # Send the message
            for imessage in ems_message:
                target_ems_obj.conf_radio(imessage)
            nest["conf_comp"]["ems"].append(ems_id)
        nest["ems_data"] = ems_messages
        nest["deployment_time"]["Radio_Configuration_Time"] = format(
            time.time() - radio_start_time, ".4f")

    # *** STEP-4: Finalize ***
    # Create Grafana Dashboard for monitoring
    # Create the NS status panel
    if monitoring:
        # Open the Grafana Dashboard template
        monitoring_slice_id = "slice_" + nest["_id"].replace("-", "_")
        with open("/katana-grafana/templates/new_dashboard.json",
                  mode="r") as dashboard_file:
            new_dashboard = json.load(dashboard_file)
            new_dashboard["dashboard"]["title"] = monitoring_slice_id
            new_dashboard["dashboard"]["uid"] = nest["_id"]
        # Add the dashboard panels
        # Add the NS Status panels
        expr = "ns_status" + '{slice_id="' + nest["_id"] + '"}'
        targets = [{
            "expr": expr,
            "legendFormat": "",
            "interval": "",
            "format": "table",
            "instant": True
        }]
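        # Collect, per VIM type, the VM names of every deployed VNF so that VM-level
        # monitoring panels can be generated; for shared deployments the last two
        # characters of the recorded VIM id are stripped before looking the VIM up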
        infra_targets = {}
        for ns in ns_inst_info.values():
            for key, value in ns.items():
                # Check if the VIM supports infrastructure monitoring
                search_vim_id = value["vim"]
                if value.get("shared", False):
                    search_vim_id = search_vim_id[:-2]
                selected_vim = mongoUtils.find("vim", {"id": search_vim_id})
                try:
                    vim_monitoring = selected_vim["type"]
                    vim_monitoring_list = infra_targets.get(vim_monitoring, [])
                    for ivnf in value["vnfr"]:
                        vim_monitoring_list += ivnf["vm_list"]
                    infra_targets[vim_monitoring] = vim_monitoring_list
                except KeyError:
                    pass
        # Create the VM Monitoring panels
        PANELS = [
            "vm_state",
            "vm_cpu_cpu_time",
            "vm_cpu_overall_cpu_usage",
            "vm_memory_actual",
            "vm_memory_available",
            "vm_memory_usage",
            "vm_disk_read_bytes",
            "vm_disk_write_bytes",
            "vm_disk_errors",
        ]
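        # For every metric in PANELS, clone the VM panel template and add one Prometheus
        # query per monitored VM, filtered by the slice id (project label) and the VM name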
        with open("/katana-grafana/templates/new_vm_monitoring_panel.json",
                  mode="r") as panel_file:
            vm_panel_template = json.load(panel_file)
            for i, panel in enumerate(PANELS):
                vm_panel = copy.deepcopy(vm_panel_template)
                vm_panel["title"] = panel
                vm_panel["gridPos"] = {"h": 8, "w": 12, "x": 13, "y": i * 9}
                vm_panel["id"] = 10 + i
                vm_targets = []
                for vim_type, vm_list in infra_targets.items():
                    for vm in vm_list:
                        expr = (vim_type + "_" + panel + '{project=~".*' +
                                nest["_id"] + '",vm_name="' + vm + '"}')
                        vm_targets.append({
                            "expr": expr,
                            "interval": "",
                            "legendFormat": ""
                        })
                vm_panel["targets"] = vm_targets
                new_dashboard["dashboard"]["panels"].append(vm_panel)
        # Read and fill the NS Status panel template
        with open("/katana-grafana/templates/new_ns_status_panel.json",
                  mode="r") as panel_file:
            ns_panel = json.load(panel_file)
            ns_panel["targets"] = targets
            new_dashboard["dashboard"]["panels"].append(ns_panel)
        # Add the WIM Monitoring panel
        if wim_monitoring:
            # Read and fill the panel template
            with open("/katana-grafana/templates/new_wim_panel.json",
                      mode="r") as panel_file:
                wim_panel = json.load(panel_file)
                wim_panel["targets"].append({
                    "expr": f"rate({monitoring_slice_id}_flows[1m])",
                    "interval": "",
                    "legendFormat": "",
                    "refId": "A",
                })
                new_dashboard["dashboard"]["panels"].append(wim_panel)
        mon_producer.send(
            "nfv_mon",
            value={
                "action": "katana_mon",
                "slice_info": {
                    "slice_id": nest["_id"],
                    "status": "running"
                },
            },
        )

        # Use the Grafana API in order to create the new dashboard for the new slice
        grafana_url = "http://katana-grafana:3000/api/dashboards/db"
        headers = {
            "accept": "application/json",
            "content-type": "application/json"
        }
        grafana_user = os.getenv("GF_SECURITY_ADMIN_USER", "admin")
        grafana_passwd = os.getenv("GF_SECURITY_ADMIN_PASSWORD", "admin")
        r = requests.post(
            url=grafana_url,
            headers=headers,
            auth=(grafana_user, grafana_passwd),
            data=json.dumps(new_dashboard),
        )
        if r.ok:
            logger.info(f"Created new Grafana dashboard for slice {nest['_id']}")
        else:
            logger.error(
                f"Failed to create Grafana dashboard for slice {nest['_id']}: {r.text}")
    logger.info(f"{nest['_id']} Status: Running")
    nest["status"] = "Running"
    nest["deployment_time"]["Slice_Deployment_Time"] = format(
        time.time() - nest["created_at"], ".4f")
    mongoUtils.update("slice", nest["_id"], nest)
Example 25
def nest_mapping(req):
    """
    Function that maps nest to the underlying network functions
    """

    # Store the gst in DB
    mongoUtils.add("gst", req)

    nest = {"_id": req["_id"]}

    # Recreate the nest req

    # Check if the base_slice_des_ref or the required fields are set
    base_slice_des_ref = req["base_slice_descriptor"].get("base_slice_des_ref", None)
    if not base_slice_des_ref:
        for req_key in REQ_FIELDS:
            if req_key not in req["base_slice_descriptor"]:
                logger.error("Required field base_slice_descriptor.{} is missing".format(req_key))
                return (
                    "Error: Required field base_slice_descriptor.{} is missing".format(req_key),
                    400,
                )

    for field in NEST_FIELDS:
        req[field] = req.get(field, None)

    # ****** STEP 1: Slice Descriptor ******
    if not req["base_slice_descriptor"]:
        logger.error("No Base Slice Descriptor given - Exit")
        return "NEST Error: No Base Slice Descriptor given", 400
    req_slice_des = req["base_slice_descriptor"]
    # *** Recreate the NEST ***
    for req_key in SLICE_DES_OBJ:
        req_slice_des[req_key] = req_slice_des.get(req_key, None)
    for req_key in SLICE_DES_LIST:
        req_slice_des[req_key] = req_slice_des.get(req_key, [])

    # *** Check if there are references for slice ***
    if req_slice_des["base_slice_des_ref"]:
        ref_slice = mongoUtils.find(
            "base_slice_des_ref", {"base_slice_des_id": req_slice_des["base_slice_des_ref"]}
        )
        if ref_slice:
            for key, value in req_slice_des.items():
                try:
                    if value is None or value == []:
                        req_slice_des[key] = ref_slice[key]
                except KeyError:
                    continue
        else:
            logger.error(
                "slice_descriptor {} not found".format(req_slice_des["base_slice_des_ref"])
            )
            return "Error: referenced slice_descriptor not found", 400

    # Create the shared value
    nest["shared"] = {
        "isolation": req_slice_des["isolation"],
        "simultaneous_nsi": req_slice_des["simultaneous_nsi"],
    }

    # Check that the locations in the coverage field are registered
    not_supp_loc = []
    for location_id in req_slice_des["coverage"]:
        if not mongoUtils.find("location", {"id": location_id.lower()}):
            not_supp_loc.append(location_id)
            logger.warning(f"Location {location_id} is not registered")

    for location_id in not_supp_loc:
        req_slice_des["coverage"].remove(location_id)

    # *************************** Start the mapping ***************************
    # Currently supports:
    # 1) If delay_tolerance --> EMBB else --> URLLC
    #    If EMBB --> EPC Placement=@Core. If URLLC --> EPC Placement=@Edge
    # 2) If network throughput > 100 Mbps --> Type=5G
    # *************************************************************************
    functions_list = []

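    # Throughput values are expressed in kbps, so the 100000 threshold below corresponds
    # to the 100 Mbps rule stated above (5G functions if exceeded, 4G otherwise)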
    if req_slice_des["network_DL_throughput"]["guaranteed"] > 100000:
        gen = 5
    else:
        gen = 4

    # *** Calculate the type of the slice (sst) ***
    if req_slice_des["delay_tolerance"]:
        # EMBB
        nest["sst"] = 1
        # Find the registered Core Network Function
        epc = mongoUtils.find("func", calc_find_data(gen, "core", 0))
        if not epc:
            return "Error: No available Core Network Functions", 400
        # Check whether the NEST allows sharing and whether this function is shareable
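        # isolation values 1 and 3 request a dedicated core (values 2 and 3 a dedicated
        # RAN, see the radio checks below), so core sharing is only attempted otherwise;
        # an existing sharing list is reused while it has room (max_shared == 0 means no
        # limit), otherwise a new sharing list is created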
        if (
            req_slice_des["isolation"] != 1
            and req_slice_des["isolation"] != 3
            and epc["shared"]["availability"]
        ):
            found_list_key = None
            max_len = epc["shared"].get("max_shared", 0)
            for grouped_nest_key, grouped_nest_list in epc["shared"]["sharing_list"].items():
                if len(grouped_nest_list) < max_len or not max_len:
                    found_list_key = grouped_nest_key
                    grouped_nest_list.append(nest["_id"])
                    sharing_list = mongoUtils.get("sharing_lists", found_list_key)
                    sharing_list["nest_list"].append(nest["_id"])
                    mongoUtils.update("sharing_lists", found_list_key, sharing_list)
                    break
            if not found_list_key:
                found_list_key = str(uuid.uuid4())
                epc["shared"]["sharing_list"][found_list_key] = [nest["_id"]]
                data = {
                    "_id": found_list_key,
                    "nest_list": [nest["_id"]],
                    "ns_list": {},
                }
                mongoUtils.add("sharing_lists", data)
            nest["shared"]["core"] = {epc["_id"]: found_list_key}
        connections = []
        not_supp_loc = []
        for location in req_slice_des["coverage"]:
            enb = mongoUtils.find("func", calc_find_data(gen, location.lower(), 1))
            if not enb:
                not_supp_loc.append(location)
            else:
                # Check whether the NEST allows sharing and whether this function is shareable
                if req_slice_des["isolation"] < 2 and enb["shared"]["availability"]:
                    found_list_key = None
                    max_len = enb["shared"].get("max_shared", 0)
                    for grouped_nest_key, grouped_nest_list in enb["shared"][
                        "sharing_list"
                    ].items():
                        if len(grouped_nest_list) < max_len or not max_len:
                            found_list_key = grouped_nest_key
                            grouped_nest_list.append(nest["_id"])
                            sharing_list = mongoUtils.get("sharing_lists", found_list_key)
                            sharing_list["nest_list"].append(nest["_id"])
                            mongoUtils.update("sharing_lists", found_list_key, sharing_list)
                            break
                    if not found_list_key:
                        found_list_key = str(uuid.uuid4())
                        enb["shared"]["sharing_list"][found_list_key] = [nest["_id"]]
                        data = {
                            "_id": found_list_key,
                            "nest_list": [nest["_id"]],
                            "ns_list": {},
                        }
                        mongoUtils.add("sharing_lists", data)
                    nest["shared"]["radio"] = nest["shared"].get("radio", {})
                    nest["shared"]["radio"][enb["_id"]] = found_list_key
                connections.append({"core": epc, "radio": enb})
                enb["tenants"].append(nest["_id"])
                mongoUtils.update("func", enb["_id"], enb)
                functions_list.append(enb["_id"])
        if not epc or not connections:
            return "Error: No available Network Functions", 400
        epc["tenants"].append(nest["_id"])
        mongoUtils.update("func", epc["_id"], epc)
        functions_list.append(epc["_id"])
        for location in not_supp_loc:
            logger.warning(f"Location {location} not supported")
            req_slice_des["coverage"].remove(location)
    else:
        # URLLC
        nest["sst"] = 2
        connections = []
        not_supp_loc = []
        for location in req_slice_des["coverage"]:
            epc = mongoUtils.find("func", calc_find_data(gen, location.lower(), 0))
            enb = mongoUtils.find("func", calc_find_data(gen, location.lower(), 1))
            if not epc or not enb:
                not_supp_loc.append(location)
            else:
                # Check whether the NEST allows sharing and whether each function is shareable
                # For the Core function
                if (
                    req_slice_des["isolation"] != 1
                    and req_slice_des["isolation"] != 3
                    and epc["shared"]["availability"]
                ):
                    found_list_key = None
                    max_len = epc["shared"].get("max_shared", 0)
                    for grouped_nest_key, grouped_nest_list in epc["shared"][
                        "sharing_list"
                    ].items():
                        if len(grouped_nest_list) < max_len or not max_len:
                            found_list_key = grouped_nest_key
                            grouped_nest_list.append(nest["_id"])
                            sharing_list = mongoUtils.get("sharing_lists", found_list_key)
                            sharing_list["nest_list"].append(nest["_id"])
                            mongoUtils.update("sharing_lists", found_list_key, sharing_list)
                            break
                    if not found_list_key:
                        found_list_key = str(uuid.uuid4())
                        epc["shared"]["sharing_list"][found_list_key] = [nest["_id"]]
                        data = {
                            "_id": found_list_key,
                            "nest_list": [nest["_id"]],
                            "ns_list": {},
                        }
                        mongoUtils.add("sharing_lists", data)
                    nest["shared"]["core"] = nest["shared"].get("core", {})
                    nest["shared"]["core"][epc["_id"]] = found_list_key
                # For the RAN function
                if req_slice_des["isolation"] < 2 and enb["shared"]["availability"]:
                    found_list_key = None
                    max_len = enb["shared"].get("max_shared", 0)
                    for grouped_nest_key, grouped_nest_list in enb["shared"][
                        "sharing_list"
                    ].items():
                        if len(grouped_nest_list) < max_len or not max_len:
                            found_list_key = grouped_nest_key
                            grouped_nest_list.append(nest["_id"])
                            sharing_list = mongoUtils.get("sharing_lists", found_list_key)
                            sharing_list["nest_list"].append(nest["_id"])
                            mongoUtils.update("sharing_lists", found_list_key, sharing_list)
                            break
                    if not found_list_key:
                        found_list_key = str(uuid.uuid4())
                        enb["shared"]["sharing_list"][found_list_key] = [nest["_id"]]
                        data = {
                            "_id": found_list_key,
                            "nest_list": [nest["_id"]],
                            "ns_list": {},
                        }
                        mongoUtils.add("sharing_lists", data)
                    nest["shared"]["radio"] = nest["shared"].get("radio", {})
                    nest["shared"]["radio"][enb["_id"]] = found_list_key
                connections.append({"core": epc, "radio": enb})
                epc["tenants"].append(nest["_id"])
                enb["tenants"].append(nest["_id"])
                mongoUtils.update("func", enb["_id"], enb)
                mongoUtils.update("func", epc["_id"], epc)
                functions_list.extend([epc["_id"], enb["_id"]])
        if not connections:
            return "Error: No available Network Functions", 400
        for location in not_supp_loc:
            logger.warning(f"Location {location} not supported")
            req_slice_des["coverage"].remove(location)

    nest["connections"] = connections
    nest["functions"] = functions_list

    # Values to be copied to NEST
    KEYS_TO_BE_COPIED = (
        "network_DL_throughput",
        "ue_DL_throughput",
        "network_UL_throughput",
        "ue_UL_throughput",
        "group_communication_support",
        "mtu",
        "number_of_terminals",
        "positional_support",
        "radio_spectrum",
        "device_velocity",
        "terminal_density",
        "coverage",
    )
    for key in KEYS_TO_BE_COPIED:
        nest[key] = req_slice_des[key]

    # Add slice_name to NEST based on the base_slice_des id
    nest["slice_name"] = req_slice_des["base_slice_des_id"]

    # ****** STEP 2: Service Descriptor ******
    if req["service_descriptor"]:
        req_service_des = req["service_descriptor"]
        # *** Recreate the NEST ***
        for req_key in SERVICE_DES_OBJ:
            req_service_des[req_key] = req_service_des.get(req_key, None)
        for req_key in SERVICE_DES_LIST:
            req_service_des[req_key] = req_service_des.get(req_key, [])
        # Create the NS field on Nest
        nest["ns_list"] = req_service_des["ns_list"]

        # # Replace Placement with location in each NS
        # for ns in nest["ns_list"]:
        #     ns["placement"] = (
        #         lambda x: {"location": ["Core"]} if not x else
        #         {"location": req_slice_des["coverage"]})(ns["placement"])

    # ****** STEP 3: Test Descriptor ******
    if req["test_descriptor"]:
        req_test_des = req["test_descriptor"]
        # *** Recreate the NEST ***
        for req_key in TEST_DES_OBJ:
            req_test_des[req_key] = req_test_des.get(req_key, None)
        for req_key in TEST_DES_LIST:
            req_test_des[req_key] = req_test_des.get(req_key, [])
        # Create the Probe field on Nest
        nest["probe_list"] = req_test_des["probe_list"]

    if (
        not mongoUtils.find(
            "base_slice_des_ref",
            {"base_slice_des_id": req["base_slice_descriptor"]["base_slice_des_id"]},
        )
        and req["base_slice_descriptor"]["base_slice_des_id"]
    ):
        new_uuid = str(uuid.uuid4())
        req["base_slice_descriptor"]["_id"] = new_uuid
        mongoUtils.add("base_slice_des_ref", req["base_slice_descriptor"])
    return nest, 0
Example 26
def add_slice(nest_req):
    """
    Creates the network slice
    """

    # Recreate the NEST, filling missing options with None
    nest = {
        "_id": nest_req["_id"],
        "status": "Init",
        "created_at": time.time(),  # unix epoch
        "deployment_time": {
            "Placement_Time": None,
            "Provisioning_Time": None,
            "WAN_Deployment_Time": None,
            "NS_Deployment_Time": None,
            "Radio_Configuration_Time": None,
            "Slice_Deployment_Time": None,
        },
    }
    mongoUtils.add("slice", nest)
    for nest_key in NEST_KEYS_OBJ:
        nest[nest_key] = nest_req.get(nest_key, None)
    for nest_key in NEST_KEYS_LIST:
        nest[nest_key] = nest_req.get(nest_key, [])

    # **** STEP-1: Placement ****
    nest["status"] = "Placement"
    nest["conf_comp"] = {"nf": [], "ems": []}
    mongoUtils.update("slice", nest["_id"], nest)
    logger.info(f"{nest['_id']} Status: Placement")
    placement_start_time = time.time()

    # Initialize the lists
    vim_dict = {}
    total_ns_list = []
    ems_messages = {}

    # Get Details for the Network Services
    # i) The extra NS of the slice
    for location in nest["coverage"]:
        err, _ = ns_details(nest["ns_list"], location, vim_dict, total_ns_list)
        if err:
            delete_slice(nest)
            return
    del nest["ns_list"]
    nest["ns_list"] = copy.deepcopy(total_ns_list)
    # ii) The NS part of the core slice
    inst_functions = {}
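    # Cache the placement details of each core/radio function by its _id so that functions
    # appearing in more than one connection are resolved only once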
    for connection in nest["connections"]:
        for key in connection:
            if connection[key]["_id"] in inst_functions:
                connection[key] = inst_functions[connection[key]["_id"]]
                continue
            try:
                err, pop_list = ns_details(
                    connection[key]["ns_list"], connection[key]["location"], vim_dict, total_ns_list
                )
                if pop_list:
                    connection[key]["ns_list"] = [
                        x for x in connection[key]["ns_list"] if x not in pop_list
                    ]
                if err:
                    delete_slice(nest)
                    return
                inst_functions[connection[key]["_id"]] = connection[key]
            except KeyError:
                continue

    nest["vim_list"] = vim_dict
    nest["total_ns_list"] = total_ns_list
    nest["deployment_time"]["Placement_Time"] = format(time.time() - placement_start_time, ".4f")

    # **** STEP-2: Resource Provisioning ****
    nest["status"] = "Provisioning"
    mongoUtils.update("slice", nest["_id"], nest)
    logger.info(f"{nest['_id']} Status: Provisioning")
    prov_start_time = time.time()

    # *** STEP-2a: Cloud ***
    # *** STEP-2a-i: Create the new tenant/project on the VIM ***
    for num, (vim, vim_info) in enumerate(vim_dict.items()):
        target_vim = mongoUtils.find("vim", {"id": vim})
        target_vim_obj = pickle.loads(mongoUtils.find("vim_obj", {"id": vim})["obj"])
        # Define project parameters
        tenant_project_name = "vim_{0}_katana_{1}".format(num, nest["_id"])
        tenant_project_description = "vim_{0}_katana_{1}".format(num, nest["_id"])
        tenant_project_user = "******".format(num, nest["_id"])
        tenant_project_password = "******"
        # If the vim is Openstack type, set quotas
        quotas = (
            vim_info["resources"]
            if target_vim["type"] == "openstack" or target_vim["type"] == "Openstack"
            else None
        )
        ids = target_vim_obj.create_slice_prerequisites(
            tenant_project_name,
            tenant_project_description,
            tenant_project_user,
            tenant_project_password,
            nest["_id"],
            quotas=quotas,
        )
        # Register the tenant to the mongo db
        target_vim["tenants"][nest["_id"]] = tenant_project_name
        mongoUtils.update("vim", target_vim["_id"], target_vim)

        # STEP-2a-ii: Add the new VIM tenant to NFVO
        if target_vim["type"] == "openstack":
            # Update the config parameter for the tenant
            config_param = dict(security_groups=ids["secGroupName"])
        elif target_vim["type"] == "opennebula":
            config_param = target_vim["config"]
        else:
            config_param = {}

        for nfvo_id in vim_info["nfvo_list"]:
            target_nfvo = mongoUtils.find("nfvo", {"id": nfvo_id})
            target_nfvo_obj = pickle.loads(mongoUtils.find("nfvo_obj", {"id": nfvo_id})["obj"])
            vim_id = target_nfvo_obj.addVim(
                tenant_project_name,
                target_vim["password"],
                target_vim["type"],
                target_vim["auth_url"],
                target_vim["username"],
                config_param,
            )
            vim_info["nfvo_vim_account"] = vim_info.get("nfvo_vim_account", {})
            vim_info["nfvo_vim_account"][nfvo_id] = vim_id
            # Register the tenant to the mongo db
            target_nfvo["tenants"][nest["_id"]] = target_nfvo["tenants"].get(nest["_id"], [])
            target_nfvo["tenants"][nest["_id"]].append(vim_id)
            mongoUtils.update("nfvo", target_nfvo["_id"], target_nfvo)

    mongoUtils.update("slice", nest["_id"], nest)
    # *** STEP-2b: WAN ***
    if mongoUtils.count("wim") <= 0:
        logger.warning("There is no registered WIM")
    else:
        wan_start_time = time.time()
        # Create the data for the WIM
        wim_data = {"_id": nest["_id"], "core_connections": [], "extra_ns": [], "slice_sla": {}}
        # i) Create the slice_sla data for the WIM
        wim_data["slice_sla"] = {
            "network_DL_throughput": nest["network_DL_throughput"],
            "network_UL_throughput": nest["network_UL_throughput"],
            "mtu": nest["mtu"],
        }
        # ii) Add the connections
        for connection in nest["connections"]:
            data = {}
            for key in connection:
                key_data = {}
                try:
                    ns_l = connection[key]["ns_list"]
                except KeyError:
                    pass
                else:
                    key_data["ns"] = []
                    for ns in ns_l:
                        if ns["placement_loc"] not in key_data["ns"]:
                            key_data["ns"].append(ns["placement_loc"])
                try:
                    pnf_l = connection[key]["pnf_list"]
                except KeyError:
                    pass
                else:
                    key_data["pnf"] = pnf_l
                if key_data:
                    data[key] = key_data
            if data:
                wim_data["core_connections"].append(data)
        # iii) Add the extra Network Services
        for ns in nest["ns_list"]:
            if ns["placement_loc"] not in wim_data["extra_ns"]:
                wim_data["extra_ns"].append(ns["placement_loc"])
        # iv) Add the probes
        wim_data["probes"] = nest["probe_list"]
        # Select WIM - Assume that there is only one registered
        wim_list = list(mongoUtils.index("wim"))
        target_wim = wim_list[0]
        target_wim_id = target_wim["id"]
        target_wim_obj = pickle.loads(mongoUtils.find("wim_obj", {"id": target_wim_id})["obj"])
        target_wim_obj.create_slice(wim_data)
        nest["wim_data"] = wim_data
        target_wim["slices"][nest["_id"]] = nest["_id"]
        mongoUtils.update("wim", target_wim["_id"], target_wim)
        nest["deployment_time"]["WAN_Deployment_Time"] = format(time.time() - wan_start_time, ".4f")
    nest["deployment_time"]["Provisioning_Time"] = format(time.time() - prov_start_time, ".4f")

    # **** STEP-3: Slice Activation Phase****
    nest["status"] = "Activation"
    mongoUtils.update("slice", nest["_id"], nest)
    logger.info(f"{nest['_id']} Status: Activation")
    # *** STEP-3a: Cloud ***
    # Instantiate NS
    # Store info about instantiated NSs
    ns_inst_info = {}
    nest["deployment_time"]["NS_Deployment_Time"] = {}
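    # Instantiate every NS through its NFVO using the VIM account created in STEP-2a and
    # record the NFVO instantiation id per NS id and placement location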
    for ns in total_ns_list:
        ns["start_time"] = time.time()
        ns_inst_info[ns["ns-id"]] = {}
        target_nfvo = mongoUtils.find("nfvo", {"id": ns["nfvo-id"]})
        target_nfvo_obj = pickle.loads(mongoUtils.find("nfvo_obj", {"id": ns["nfvo-id"]})["obj"])
        selected_vim = ns["placement_loc"]["vim"]
        nfvo_vim_account = vim_dict[selected_vim]["nfvo_vim_account"][ns["nfvo-id"]]
        nfvo_inst_ns = target_nfvo_obj.instantiateNs(ns["ns-name"], ns["nsd-id"], nfvo_vim_account)
        ns_inst_info[ns["ns-id"]][ns["placement_loc"]["location"]] = {"nfvo_inst_ns": nfvo_inst_ns}
        nest["conf_comp"]["nf"].append(ns["nsd-id"])
        time.sleep(6)  # pause between consecutive NS instantiation requests

    # Get the nsr for each service and wait for the activation
    for ns in total_ns_list:
        target_nfvo = mongoUtils.find("nfvo", {"id": ns["nfvo-id"]})
        target_nfvo_obj = pickle.loads(mongoUtils.find("nfvo_obj", {"id": ns["nfvo-id"]})["obj"])
        site = ns["placement_loc"]
        nfvo_inst_ns_id = ns_inst_info[ns["ns-id"]][site["location"]]["nfvo_inst_ns"]
        insr = target_nfvo_obj.getNsr(nfvo_inst_ns_id)
        while insr["operational-status"] != "running" or insr["config-status"] != "configured":
            time.sleep(10)
            insr = target_nfvo_obj.getNsr(nfvo_inst_ns_id)
        nest["deployment_time"]["NS_Deployment_Time"][ns["ns-name"]] = format(
            time.time() - ns["start_time"], ".4f"
        )
        # Get the IPs of the instantiated NS
        vnf_list = []
        vnfr_id_list = target_nfvo_obj.getVnfrId(insr)
        for ivnfr_id in vnfr_id_list:
            vnfr = target_nfvo_obj.getVnfr(ivnfr_id)
            vnf_list.append(target_nfvo_obj.getIPs(vnfr))
        ns_inst_info[ns["ns-id"]][site["location"]]["vnfr"] = vnf_list

    nest["ns_inst_info"] = ns_inst_info
    mongoUtils.update("slice", nest["_id"], nest)

    # *** STEP-3b: Radio Slice Configuration ***
    if mongoUtils.count("ems") <= 0:
        logger.warning("There is no registered EMS")
    else:
        # Build the per-EMS radio configuration messages (NS management IPs, PNFs and
        # radio SLA) and collect them in ems_messages
        ems_radio_data = {
            "ue_DL_throughput": nest["ue_DL_throughput"],
            "ue_UL_throughput": nest["ue_UL_throughput"],
            "group_communication_support": nest["group_communication_support"],
            "number_of_terminals": nest["number_of_terminals"],
            "positional_support": nest["positional_support"],
            "radio_spectrum": nest["radio_spectrum"],
            "device_velocity": nest["device_velocity"],
            "terminal_density": nest["terminal_density"],
        }
        radio_start_time = time.time()
        for connection in nest["connections"]:
            data = {}
            ems_id_list = []
            for key in connection:
                key_data = {}
                try:
                    ems_id = connection[key]["ems-id"]
                except KeyError:
                    continue
                else:
                    if ems_id not in ems_id_list:
                        ems_id_list.append(ems_id)
                    try:
                        ns_l = connection[key]["ns_list"]
                    except KeyError:
                        pass
                    else:
                        key_data["ns"] = []
                        for ns in ns_l:
                            try:
                                ns_info = ns_inst_info[ns["ns-id"]][connection[key]["location"]]
                            except KeyError:
                                ns_info = ns_inst_info[ns["ns-id"]]["Core"]
                            ns_data = {
                                "name": ns["ns-name"],
                                "location": ns["placement_loc"]["location"],
                                "vnf_list": ns_info["vnfr"],
                            }
                            key_data["ns"].append(ns_data)
                try:
                    key_data["pnf"] = connection[key]["pnf_list"]
                except KeyError:
                    pass
                if key_data:
                    data[key] = key_data
            if data:
                data["slice_sla"] = ems_radio_data
                data["slice_id"] = nest["_id"]
                for ems_id in ems_id_list:
                    messages = ems_messages.get(ems_id, [])
                    messages.append(data)
                    ems_messages[ems_id] = messages

        for ems_id, ems_message in ems_messages.items():
            # Find the EMS
            target_ems = mongoUtils.find("ems", {"id": ems_id})
            if not target_ems:
                # Error handling: There is no such EMS
                logger.error("EMS {} not found - No configuration".format(ems_id))
                continue
            target_ems_obj = pickle.loads(mongoUtils.find("ems_obj", {"id": ems_id})["obj"])
            # Send the message
            for imessage in ems_message:
                target_ems_obj.conf_radio(imessage)
            nest["conf_comp"]["ems"].append(ems_id)
        nest["ems_data"] = ems_messages
        nest["deployment_time"]["Radio_Configuration_Time"] = format(
            time.time() - radio_start_time, ".4f"
        )

    # *** STEP-4: Finalize ***
    logger.info(f"{nest['_id']} Status: Running")
    nest["status"] = "Running"
    nest["deployment_time"]["Slice_Deployment_Time"] = format(
        time.time() - nest["created_at"], ".4f"
    )
    mongoUtils.update("slice", nest["_id"], nest)
Example 27
# Create the Kafka Consumer
consumer = kafkaUtils.create_consumer("slice")

# Create the initial core location
try:
    new_uuid = str(uuid.uuid4())
    core_location_data = {
        "_id": new_uuid,
        "id": "core",
        "created_at": time.time(),
        "description": "The default Core location",
        "vims": [],
        "functions": [],
    }
    mongoUtils.add("location", core_location_data)
except pymongo.errors.DuplicateKeyError:
    pass

# Check for new messages
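# Each message is committed and then dispatched according to its "action" field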
for message in consumer:
    logger.info("--- New Message ---")
    logger.info("Topic: {0} | Partition: {1} | Offset: {2}".format(
        message.topic, message.partition, message.offset))
    # Commit the latest received message
    consumer.commit()
    action = message.value["action"]
    # Add slice
    if action == "add":
        payload = message.value["message"]
        sliceUtils.add_slice(payload)