def main(constraints=None, config_file=None, data_input=None, bw_allocs=None):
    # Create Radio Queues Database
    queue_database = BrassOrientDBHelper(constraints, config_file)
    queue_database.open_database()

    radio_input_name = 'Radio_Input'
    radio_control_name = 'Radio_Control'
    radio_queues_name = 'Radio_Queues'
    radio_input_property = 'Input_Rate'
    radio_control_property = 'BW_Allocs'
    radio_queues_property = 'Radio_Queues'

    # Create Classes and Properties in Database
    setup_node(queue_database, radio_input_name, radio_input_property, 'EMBEDDEDLIST')
    setup_node(queue_database, radio_control_name, radio_control_property, 'EMBEDDEDLIST')
    setup_node(queue_database, radio_queues_name, radio_queues_property, 'EMBEDDEDLIST')

    data_input_properties = {}
    if data_input is not None:
        with open(data_input, 'r') as f:
            data_input_properties[radio_input_property] = json.load(f)

    bw_allocs_properties = {}
    if bw_allocs is not None:
        with open(bw_allocs, 'r') as f:
            bw_allocs_properties[radio_control_property] = json.load(f)

    radio_queues_properties = {radio_queues_property: {}}

    queue_database.create_node(radio_input_name, data_input_properties)
    queue_database.create_node(radio_control_name, bw_allocs_properties)
    queue_database.create_node(radio_queues_name, radio_queues_properties)

    return queue_database
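
# Usage sketch (illustrative only, not part of the original module): one way main()
# above might be driven. The database name 'radio_queues_db', the config path
# 'config.json', and the two JSON file names are hypothetical placeholders.
def _example_build_queue_database():
    queue_db = main(constraints='radio_queues_db',
                    config_file='config.json',
                    data_input='input_rates.json',
                    bw_allocs='bw_allocs.json')
    # Inspect the nodes that were created before closing the connection.
    print(queue_db.get_nodes_by_type('Radio_Input'))
    print(queue_db.get_nodes_by_type('Radio_Control'))
    queue_db.close_database()
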
class OrientDBXMLImporter(object):
    """
    Class responsible for importing MDL XML files into an OrientDB database.

    :param str databaseName: name of the OrientDB database to import into
    :param str mdlFile: path to the MDL XML file to import
    :param str configFile: path to the OrientDB configuration file
    """

    def __init__(self, databaseName, mdlFile, configFile='config.json'):
        self.loadrObject = []
        self.uniqueIdentifiers = {}
        self.orientDB_helper = BrassOrientDBHelper(database_name=databaseName, config_file=configFile)
        self.mdlFile = mdlFile
        self.orientDB_helper.open_database(over_write=True)
        self.preprocessor = None
        self._schema = None

    def import_xml(self):
        """
        Main function called to start the import process. A temporary copy of the xml file is saved
        with attributes removed from the root tag, ie <MDLRoot> or <VCL>. The temporary xml file is
        then validated against the schema and parsed. Lastly the temporary xml file is removed.

        :return:
        """
        self.preprocessor = preprocess.create_preprocessor(self.mdlFile)
        self.preprocessor.preprocess_xml()
        self.parseXML(self.preprocessor.orientdb_xml_file)
        self.preprocessor.remove_orientdb_xml()

    def parseXML(self, xmlFile):
        """
        Parses the passed in xmlFile and calls functions to create nodes and edges in OrientDB.

        :param str xmlFile: xml file to import
        :return:
        :raises BrassException: catches any parsing error and rethrows it as a BrassException
        """
        # this is a stack we maintain when traversing the xml tree
        attribute_stack = []
        # after we decide something should be a vertex we add it to the vertex list
        vertices = []
        # a list of the vertices names (which could also be derived from vertices)
        # so we know what OrientDB classes to create
        verticesNames = []
        # the two types of edges
        containmentEdges = []
        referenceEdges = []

        for event, elem in etree.iterparse(xmlFile, events=('start', 'end')):
            # at the beginning, add everything on the stack
            # the element can carry attributes, eg: <QoSPolicy ID="GR1_to_TA1_MissionSLP">
            # (in which case we want to capture them), or not, eg: <TestMission>
            if event == 'start':
                item = {}
                item[elem.tag] = elem.text if elem.text else ''
                for el in elem.attrib.keys():
                    item[el] = elem.attrib[el]
                attribute_stack.append({elem.tag: item})

            # the hardest part is at the end: we are trying to decide whether the event closed out
            # a vertex or something that should be an attribute of a vertex, eg:
            #   <TestMission>
            #     <Name>Test Mission 1</Name>
            #     <Description>Test Mission 1: Frequency change</Description>
            #     <TmNSCompleteness>true</TmNSCompleteness>
            #     <TmNSCompletenessDescription>Complete</TmNSCompletenessDescription>
            #   </TestMission>
            # in this example the algorithm should detect that TestMission should be a vertex and
            # Name, Description, TmNSCompleteness, TmNSCompletenessDescription should be attributes
            # of TestMission
            elif event == 'end':
                # if the last entry on the stack contains more than one thing, it's a vertex
                if len(attribute_stack[-1][list(attribute_stack[-1])[0]].keys()) > 1:
                    try:
                        # drop whitespace-only element text so it does not become a property
                        elemText = attribute_stack[-1][list(attribute_stack[-1])[0]][list(attribute_stack[-1])[0]]
                        if elemText.isspace():
                            attribute_stack[-1][list(attribute_stack[-1])[0]].pop(list(attribute_stack[-1])[0])
                    except:
                        pass

                    a = attribute_stack.pop()
                    # if it doesn't have a unique identifier, assign one, and also assign a uid to the parent
                    if self.uidAlreadyAssigned(a) == 0:
                        a[list(a)[0]]['uid'] = self.assignUniqueId(list(a)[0])
                    try:
                        if self.uidAlreadyAssigned(attribute_stack[-1]) == 0:
                            attribute_stack[-1][list(attribute_stack[-1])[0]]['uid'] = self.assignUniqueId(
                                list(attribute_stack[-1])[0])
                    except:
                        pass

                    # adding to the vertices list
                    vertices.append(a)
                    verticesNames.append(list(a)[0])

                    try:
                        # creating a containment edge
                        containmentEdges.append(
                            [a[list(a)[0]]['uid'], attribute_stack[-1][list(attribute_stack[-1])[0]]['uid']])
                    except:
                        pass

                    try:
                        if len(attribute_stack) > 1:
                            if self.uidAlreadyAssigned(attribute_stack[-2]) == 0:
                                attribute_stack[-2][list(attribute_stack[-2])[0]]['uid'] = self.assignUniqueId(
                                    list(attribute_stack[-2])[0])
                    except:
                        raise BrassException(sys.exc_info()[0], 'MDLImporter.parseXML')

                # if it doesn't contain more than one thing, it's an attribute,
                # so add it to the vertex right above it on the stack
                else:
                    tmp_idx_1_attribute_stack_keys = list(attribute_stack[-1])
                    tmp_idx_2_attribute_stack_keys = list(attribute_stack[-2])
                    #attribute_stack[-2][tmp_idx_2_attribute_stack_keys[0]] = dict(
                    #    attribute_stack[-2][tmp_idx_2_attribute_stack_keys[0]].items() | attribute_stack[-1][
                    #        tmp_idx_1_attribute_stack_keys[0]].items())
                    attribute_stack[-2][tmp_idx_2_attribute_stack_keys[0]] = self.merge_attribute_map(
                        attribute_stack[-2][tmp_idx_2_attribute_stack_keys[0]],
                        attribute_stack[-1][tmp_idx_1_attribute_stack_keys[0]]
                    )
                    if 'uid' not in attribute_stack[-2][tmp_idx_2_attribute_stack_keys[0]].keys():
                        attribute_stack[-2][tmp_idx_2_attribute_stack_keys[0]]['uid'] = self.assignUniqueId(
                            tmp_idx_2_attribute_stack_keys[0])
                    attribute_stack.pop()

        orientdbRestrictedIdentifier = []
        for s in set(verticesNames):
            try:
                self.orientDB_helper.create_node_class(s)
            except:
                # certain names are reserved keywords in OrientDB, eg: Limit,
                # so we need to do things a little differently
                self.orientDB_helper.create_node_class(s + '_a')
                orientdbRestrictedIdentifier.append(s)

        # this is the part where we add the vertices one by one to orientdb
        for e in vertices:
            # print e
            try:
                classToInsertInto = list(e)[0]
                if classToInsertInto in orientdbRestrictedIdentifier:
                    classToInsertInto += '_a'
                if classToInsertInto == 'MDLRoot':
                    e[list(e)[0]]['schema'] = self._schema
                print(f"create_node({classToInsertInto}, {e[list(e)[0]]})")
                self.orientDB_helper.create_node(classToInsertInto, e[list(e)[0]])
            except:
                raise BrassException(sys.exc_info()[1], 'MDLImporter.parseXML')
            # print "insert into " + e.keys()[0] + " (" + columns + ") values (" + values + ")"

        self.orientDB_helper.create_edge_class('Containment')

        # adding containment edges
        for edge in containmentEdges:
            # print "create edge Containment from (SELECT FROM V WHERE uid = '"+edge[0]+"') TO (SELECT FROM V WHERE uid = '"+edge[1]+"')"
            try:
                child = [condition_str('uid', edge[0])]
                parent = [condition_str('uid', edge[1])]
                self.orientDB_helper.set_containment_relationship(
                    parent_conditions=parent,
                    child_conditions=child
                )
            except:
                raise BrassException(sys.exc_info()[0], 'MDLImporter.parseXML')
            # print edge[0], edge[1]

        self.orientDB_helper.create_edge_class('Reference')

        # note: property names are case sensitive in OrientDB
        for idref in self.orientDB_helper.run_query(select_sql('V', data_to_extract=['distinct(IDREF) as idref'])):
            # sometimes an IDREF has no matching ID (an orphan), so skip those
            try:
                reference_condition = [condition_str('IDREF', idref.idref)]
                referent_condition = [condition_str('ID', idref.idref)]
                self.orientDB_helper.set_reference_relationship(
                    referent_condition=referent_condition,
                    reference_condition=reference_condition
                )
            except:
                pass

    def assignUniqueId(self, entityType):
        """
        Creates a unique id based on the entityType.

        :param str entityType: name of the entity (ie TestMissions, RadioLinks, MDLRoot, etc)
        :return: a unique id string
        """
        if entityType in self.uniqueIdentifiers.keys():
            self.uniqueIdentifiers[entityType] += 1
        else:
            self.uniqueIdentifiers[entityType] = 0
        uniqId = entityType + '-' + str(self.uniqueIdentifiers[entityType])
        return uniqId

    def uidAlreadyAssigned(self, element):
        """
        Checks if the element already has a unique id.

        :param element: element to check
        :return: 1 if a uid is already assigned, 0 otherwise
        """
        if 'uid' in element[list(element)[0]].keys():
            return 1
        return 0

    def merge_attribute_map(self, src_map, dest_map):
        """
        Merges dest_map into a copy of src_map. When a key appears in both maps, the values are
        collected into a list so that repeated child tags are preserved.

        :param dict src_map: existing attribute map
        :param dict dest_map: attribute map to merge in
        :return: the merged attribute map
        """
        attribute_map = dict(src_map)
        for i in dest_map:
            if i in attribute_map:
                if type(attribute_map[i]) is list:
                    attribute_map[i].append(dest_map[i])
                else:
                    attribute_map[i] = list((attribute_map[i], dest_map[i]))
            else:
                attribute_map[i] = dest_map[i]
        return attribute_map
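
# Usage sketch (illustrative only, not part of the original module): a minimal example of
# driving the importer above. The database name 'mdl_database', the MDL file 'scenario.xml',
# and the config path 'config.json' are assumptions for illustration.
# Behavior sketch for merge_attribute_map (repeated keys are folded into lists):
#   importer.merge_attribute_map({'Name': 'A'}, {'Name': 'B'})  -> {'Name': ['A', 'B']}
#   importer.merge_attribute_map({'Name': 'A'}, {'ID': 'x1'})   -> {'Name': 'A', 'ID': 'x1'}
def _example_import_mdl():
    importer = OrientDBXMLImporter('mdl_database', 'scenario.xml', configFile='config.json')
    importer.import_xml()
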
def main(database=None, config_file=None, mdl_file=None, constraints=None):
    """
    Instantiates a Processor object and a Constraints_Database object, passing in the OrientDB
    database names for the MDL data and the system constraints. Pulls the constraints for the
    simulation down from the constraints database, overwrites the MDL in the database with the
    source MDL, calls create_new_schedule(), updates the database with the new schedule, and
    exports the new MDL.

    :param database: name of an OrientDB database holding the MDL data
    :param config_file: location of the config file used to import
    :param mdl_file: path to the source MDL file to import
    :param constraints: name of the OrientDB database holding the system constraints
    :return:
    """
    # Open databases for MDL and System Constraints
    processor = BrassOrientDBHelper(database, config_file)
    constraints_database = BrassOrientDBHelper(constraints, config_file)
    constraints_database.open_database(over_write=False)

    scenarios = constraints_database.get_nodes_by_type("TestScenario")
    for scenario in scenarios:
        if scenario.name == "Test Scenario 1":
            scenario_1 = scenario

    constraints_list = constraints_database.get_child_nodes(scenario_1._rid, edgetype='HasConstraint')
    for constraint in constraints_list:
        if constraint.name == 'system wide constraint':
            system_constraints = constraint.constraint_data
            break
    constraints_database.close_database()

    # MDL Import Step
    mdl_full_path = os.path.abspath(mdl_file)
    importer = MDLImporter(database, mdl_full_path, config_file)
    importer.import_xml()
    # importer.import_mdl()

    processor.open_database(over_write=False)

    # Create new Schedule
    new_schedule, final_fitness = create_new_schedule(system_constraints=system_constraints)
    print("Final Schedule:\n")
    for c in new_schedule:
        c.print_transmission()

    # Begin Updating MDL Database
    txop_vertices = processor.get_nodes_by_type("TxOp")
    radio_link_vertices = processor.get_nodes_by_type("RadioLink")
    radio_link_up_name = "GndRadio_to_TA"
    radio_link_down_name = "TA_to_GndRadio"
    for r in radio_link_vertices:
        if r.Name == radio_link_up_name:
            radio_link_up = r
        elif r.Name == radio_link_down_name:
            radio_link_down = r

    # delete the existing TxOp nodes; they will be replaced by the new schedule
    print(txop_vertices)
    old_txop_list = [op._rid for op in txop_vertices]
    processor.delete_nodes_by_rid(old_txop_list)

    txop_rids = []
    for TxOp in new_schedule:
        txop_properties = {
            "StopUSec": TxOp.get_end_time(),
            "TxOpTimeout": 255,
            "CenterFrequencyHz": 4919500000,
            "StartUSec": TxOp.get_start_time()
        }
        processor.create_node("TxOp", txop_properties)

        # find the rid of the TxOp node that was just created
        new_txop = processor.get_nodes_by_type("TxOp")
        for op in new_txop:
            if op._rid not in txop_rids:
                txop_rids.append(op._rid)
                new_op_rid = op._rid

        # attach the new TxOp to the radio link matching its transmission direction
        if TxOp.link_direction == 'down':
            radio_rid = radio_link_down._rid
        elif TxOp.link_direction == 'up':
            radio_rid = radio_link_up._rid
        processor.set_containment_relationship(parent_rid=radio_rid, child_rid=new_op_rid)

    processor.close_database()

    print("Fitness: {0}".format(final_fitness))
    print("Total Transmission Time: {0} microseconds per epoch".format(
        total_transmission_time(new_schedule)))
    print("Final Schedule:\n")
    for c in new_schedule:
        c.print_transmission()

    # Export Updated MDL
    # export = MDLExporter(database, config_file)
    export = MDLExporter(database, "Scenario_1_Export.xml", config_file)
    export.export_xml()
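
# Usage sketch (illustrative only, not part of the original module): how the scheduling
# pipeline above might be invoked. The database names, config path, and MDL file name are
# hypothetical placeholders, not values defined by this module.
def _example_run_scheduler():
    main(database='mdl_database',
         config_file='config.json',
         mdl_file='Scenario_1.xml',
         constraints='constraints_database')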