def main():
    # empty databases
    empty_nsd_collection()
    empty_vnfd_collection()
    empty_ns_collection()
    empty_operation_collection()
    empty_resources_collection()
    empty_nsir_collection()
    empty_notification_collection()
    empty_osm_collection()
    # path to descriptors folder
    path = "../../descriptors/"
    # list of file names that contain ns and vnf descriptors
    ns_descriptors = ["CDN_all_NSD_0_4.json"]
    vnf_descriptors = ["CDN_SPR1_VNFD_0_2.json",
                       "CDN_SPR21_VNFD_0_2.json",
                       "CDN_SPR22_VNFD_0_2.json",
                       "CDN_WEBSERVER_VNFD_0_2.json"]
    # NSD SECTION
    # correspondence of nsdId and nsdCloudifyId
    nsdCloudifyId = {"vCDN_v02": "unknown"}
    # for each nsd, load the json file and create the record to be inserted
    for nsd_file in ns_descriptors:
        with open(path + nsd_file) as nsd_fd:
            nsd_json = load(nsd_fd)
        nsd_record = {"nsdId": nsd_json["nsd"]["nsdIdentifier"],
                      "nsdCloudifyId": nsdCloudifyId[nsd_json["nsd"]["nsdIdentifier"]],
                      "version": nsd_json["nsd"]["version"],
                      "nsdName": nsd_json["nsd"]["nsdName"],
                      "nsdJson": nsd_json}
        insert_nsd(nsd_record)
    # VNFD SECTION
    # for each vnfd, load the json file and create the record to be inserted
    for vnfd_file in vnf_descriptors:
        with open(path + vnfd_file) as vnfd_fd:
            vnfd_json = load(vnfd_fd)
        vnfd_record = {"vnfdId": vnfd_json["vnfdId"],
                       "vnfdVersion": vnfd_json["vnfdVersion"],
                       "vnfdName": vnfd_json["vnfProductName"],
                       "vnfdJson": vnfd_json}
        insert_vnfd(vnfd_record)
    log_process.terminate()
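# Minimal usage sketch: assuming this module is meant to be run as a standalone
# database-seeding script, an entry-point guard like the one below would invoke
# main(); the guard itself is an assumption for illustration, not taken from the
# original module.
if __name__ == "__main__":
    main()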
def onboard_vnfd(body):
    """
    Function to onboard the VNFD, including downloading it from the URL
    specified in the input.
    Parameters
    ----------
    body: dict
        IFA013 request to onboard a vnfd (exposes the vnf_package_path attribute).
    Returns
    -------
    info: dict
        Dictionary with the IFA013 answer to the vnfd onboarding process.
    """
    log_queue.put(["INFO", "vnf_package_path: %s" % body.vnf_package_path])
    filename = wget.download(body.vnf_package_path)
    fname = None
    with tarfile.open(filename) as tf:
        tf.extractall()
        # look for the json descriptor inside the extracted package
        for member in tf:
            if member.isdir():
                continue
            if member.name.endswith(".json"):
                fname = member.name
                break
    # upload it in the vnfd_db
    if fname:
        with open(fname) as vnfd_fd:
            vnfd_json = load(vnfd_fd)
        vnfd_record = {"vnfdId": vnfd_json["vnfdId"],
                       "vnfdVersion": vnfd_json["vnfdVersion"],
                       "vnfdName": vnfd_json["vnfProductName"],
                       "vnfdJson": vnfd_json}
        if vnfd_db.exists_vnfd(vnfd_json["vnfdId"], vnfd_json["vnfdVersion"]):
            vnfd_db.delete_vnfd_json(vnfd_json["vnfdId"])
        # then insert it again (creation or "update")
        vnfd_db.insert_vnfd(vnfd_record)
        # upload the descriptor in the MANO platform
        onboard_vnfd_mano(vnfd_json)
        # create the answer
        info = {"onboardedVnfPkgInfoId": vnfd_record["vnfdId"],
                "vnfId": vnfd_record["vnfdId"]}
        # remove the tar package and the json file
        os.remove(fname)
        os.remove(filename)
        return info
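# Illustrative call sketch for onboard_vnfd(): in the running service the REST
# framework builds `body` from the IFA013 request, so a SimpleNamespace and a
# hypothetical package URL stand in for it here.
def _onboard_vnfd_example():
    from types import SimpleNamespace
    body = SimpleNamespace(
        vnf_package_path="http://example.com/packages/spr1_vnfd.tar.gz")
    info = onboard_vnfd(body)
    # info echoes the vnfdId back to the caller, e.g.
    # {"onboardedVnfPkgInfoId": "<vnfdId>", "vnfId": "<vnfdId>"}
    return info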
def descriptor_viewer():
    """
    This function just responds to the browser Url
    :return: the rendered template 'descriptor.html'
    """
    if request.method == 'POST':
        try:
            already_onboarded_in_so = False
            # retrieving the IFA descriptor
            # print(request.form, request.files)
            if 'convert_text' in request.form:
                ifa_json = json.loads(request.form['convert_text'])
            elif 'file_to_convert' in request.files:
                f = request.files['file_to_convert']
                response = f.read()
                ifa_json = json.loads(response.decode('utf-8'))
            elif 'show_json' in request.form:
                ifa_json = eval(request.form['show_json'])
                already_onboarded_in_so = True
            elif 'onboard_json' in request.form:
                ifa_json = eval(request.form['onboard_json'])
                record = {}
                if 'nsd' in ifa_json:
                    # nsd case
                    if 'vnfdId' in ifa_json['nsd']:
                        record = {"nsdId": ifa_json["nsd"]["nsdIdentifier"],
                                  "nsdCloudifyId": {},
                                  "version": ifa_json["nsd"]["version"],
                                  "nsdName": ifa_json["nsd"]["nsdName"],
                                  "nsdJson": ifa_json,
                                  "shareable": True,
                                  "domain": "local"}
                        if nsd_db.get_nsd_json(nsdId=record['nsdId']) is None:
                            nsd_db.insert_nsd(record)
                            message = {"Success": 'nsdId : {} onboarded on SO with success!'.format(record['nsdId'])}
                        else:
                            log_queue.put(["DEBUG", 'nsdId already in the SO DB'])
                            raise ValueError('nsdId already in the SO DB')
                    # nsd-composite case
                    else:
                        record = {"nsdId": ifa_json["nsd"]["nsdIdentifier"],
                                  "nsdCloudifyId": {},
                                  "version": ifa_json["nsd"]["version"],
                                  "nsdName": ifa_json["nsd"]["nsdName"],
                                  "nsdJson": ifa_json,
                                  "shareable": False,
                                  "domain": "Composite"}
                        if nsd_db.get_nsd_json(nsdId=record['nsdId']) is None:
                            nsd_db.insert_nsd(record)
                            message = {"Success": 'nsdId : {} onboarded on SO with success!'.format(record['nsdId'])}
                        else:
                            log_queue.put(["DEBUG", 'nsdId already in the SO DB'])
                            raise ValueError('nsdId already in the SO DB')
                # vnfd case
                else:
                    record = {"vnfdId": ifa_json["vnfdId"],
                              "vnfdVersion": ifa_json["vnfdVersion"],
                              "vnfdName": ifa_json["vnfProductName"],
                              "vnfdJson": ifa_json}
                    if vnfd_db.get_vnfd_json(vnfdId=ifa_json["vnfdId"]) is None:
                        vnfd_db.insert_vnfd(record)
                        message = {'Success': 'vnfdId : {} onboarded on SO with success!'.format(record['vnfdId'])}
                    else:
                        log_queue.put(["DEBUG", 'vnfdId already in the SO DB'])
                        raise ValueError('vnfdId already in the SO DB')
                log_queue.put(["INFO", message["Success"]])
                flash(message['Success'], 'success')
                already_onboarded_in_so = True
            else:
                raise ValueError('No text/file valid')
            if 'nsd' in ifa_json:
                if 'vnfdId' in ifa_json['nsd']:
                    # convert a NSD
                    list_osm_json, default_index = gui_utils.ifa014_conversion(ifa_json)
                    default_osm_json = list_osm_json[default_index]
                    osm_json_network = []
                    for level in list_osm_json:
                        osm_json_network.append(
                            json_graph.node_link_data(gui_utils.json_network_nsd(level)))
                    descriptor_type = 'nsd'
                else:
                    # convert a composite NSD
                    list_osm_json, default_index = gui_utils.composite_desc_conversion(ifa_json)
                    default_osm_json = list_osm_json[default_index]
                    osm_json_network = []
                    for level in list_osm_json:
                        osm_json_network.append(
                            json_graph.node_link_data(gui_utils.json_network_composite_nsd(level)))
                    descriptor_type = 'nsd-composite'
            else:
                # convert a VNFD
                list_osm_json = [gui_utils.ifa011_conversion(ifa_json)]
                default_osm_json = list_osm_json[0]  # done in case of possible list of ifa vnfd conversion
                osm_json_network = [
                    json_graph.node_link_data(gui_utils.json_network_vnfd(default_osm_json))]
                descriptor_type = 'vnfd'
            yaml_descriptor_list = []
            for osm_json in list_osm_json:
                yaml_descriptor_list.append(
                    yaml.safe_dump(osm_json, default_flow_style=False))
            yaml_ifa_descriptor = yaml.safe_dump(ifa_json, default_flow_style=False)
            return render_template('descriptor.html',
                                   html_title='Descriptor Viewer',
                                   descriptor_type=descriptor_type,
                                   yaml_network=osm_json_network,
                                   list_osm_json=list_osm_json,
                                   yaml_osm_descriptor=yaml_descriptor_list,
                                   yaml_ifa_descriptor=yaml_ifa_descriptor,
                                   ifa_json=ifa_json,
                                   already_onboarded_in_so=already_onboarded_in_so)
        except (TypeError, KeyError, ValueError) as error:
            message = {'Error': 'Error: {}'.format(error)}
            log_queue.put(["ERROR", message['Error']])
            flash(message['Error'], 'danger')
            return redirect(url_for('home'))
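# Route-registration sketch: descriptor_viewer() relies on Flask's request and
# flash machinery, so it has to be bound to a URL rule on the GUI's Flask app.
# The `app` object and the '/descriptor' rule below are assumptions for
# illustration; the original module may register the view with a decorator instead.
app.add_url_rule('/descriptor', view_func=descriptor_viewer,
                 methods=['GET', 'POST'])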