def _createrouterport(self, id, router, subnet, **kwargs):
    """Build a RouterPort data object wired to *router* and *subnet*.

    Any extra keyword arguments are copied onto the new object as
    plain attributes.
    """
    port = RouterPort.create_instance(id)
    port.router = ReferenceObject(VRouter.default_key(router))
    port.subnet = ReferenceObject(SubNet.default_key(subnet))
    for attr_name, attr_value in kwargs.items():
        setattr(port, attr_name, attr_value)
    return port
def _createlogicalport(self, logicalnetwork, id = None, **kwargs):
    """Build a LogicalPort in *logicalnetwork*.

    When *id* is falsy a fresh UUID1 string is generated for it.
    Extra keyword arguments become attributes on the new port.
    """
    port_id = id if id else str(uuid1())
    port = LogicalPort.create_instance(port_id)
    port.logicalnetwork = ReferenceObject(LogicalNetwork.default_key(logicalnetwork))
    for attr_name, attr_value in kwargs.items():
        setattr(port, attr_name, attr_value)
    return port
def _ipam_stage2(keys, values, timestamp):
    # Second IPAM update stage (closure: `subnet_id`, `gateway` and `self`
    # are captured from the enclosing scope): attach the subnet map
    # reference to the pool, purge stale reservations, and pin the gateway.
    pool_obj = values[0]
    pool_obj.subnetmap = ReferenceObject(SubNetMap.default_key(subnet_id))
    self._remove_staled_ips(pool_obj, timestamp)
    if gateway is not None and gateway in pool_obj.reserved_ips:
        # Reserve forever
        pool_obj.reserved_ips[gateway] = None
    return ((keys[0],), (pool_obj,))
def _createphysicalport(self, physicalnetwork, name, vhost, systemid, bridge, **args):
    """Build a PhysicalPort object bound to *physicalnetwork*.

    Extra keyword arguments are copied onto the new object as attributes.
    """
    port = PhysicalPort.create_instance(vhost, systemid, bridge, name)
    port.physicalnetwork = ReferenceObject(PhysicalNetwork.default_key(physicalnetwork))
    for attr_name, attr_value in args.items():
        setattr(port, attr_name, attr_value)
    return port
def _createlogicalnetwork(self, physicalnetwork, id = None, **kwargs):
    """Build a LogicalNetwork and its companion LogicalNetworkMap.

    When *id* is falsy a fresh UUID1 string is generated. Extra keyword
    arguments are applied first, so the physicalnetwork reference set
    afterwards always wins over a same-named kwarg.
    """
    network_id = id if id else str(uuid1())
    network = LogicalNetwork.create_instance(network_id)
    for attr_name, attr_value in kwargs.items():
        setattr(network, attr_name, attr_value)
    network.physicalnetwork = ReferenceObject(PhysicalNetwork.default_key(physicalnetwork))
    network_map = LogicalNetworkMap.create_instance(network_id)
    network_map.network = network.create_reference()
    return network, network_map
def createproxyarp(self, connection, arpentries):
    '''
    Create ARP respond flow for specified ARP entries, each is a tuple
    (ip_address, mac_address, logical_network_id, local). When local is True,
    only respond to ARP request from logical port; when local is False, only
    respond to ARP request from physical port; respond to both else.
    '''
    entries = self._extra_arps.setdefault(connection, [])
    entries.extend(arpentries)
    if connection in self._flowupdaters:
        # Notify the per-connection flow updater about the affected networks
        network_refs = [ReferenceObject(LogicalNetwork.default_key(network_id))
                        for _, _, network_id, _ in arpentries]
        self._flowupdaters[connection].updateobjects(network_refs)
def _createphysicalport(self, physicalnetwork, name, systemid='%', bridge='%', **kwargs):
    """Build a PhysicalPort object bound to *physicalnetwork*.

    *systemid* and *bridge* default to '%'. Extra keyword arguments are
    copied onto the new object as attributes.
    """
    port = PhysicalPort.create_instance(systemid, bridge, name)
    port.physicalnetwork = ReferenceObject(PhysicalNetwork.default_key(physicalnetwork))
    for attr_name, attr_value in kwargs.items():
        setattr(port, attr_name, attr_value)
    return port
def _createlogicalnetwork(self, physicalnetwork, id, **args):
    """Build a LogicalNetwork and its LogicalNetworkMap sharing the same *id*.

    Extra keyword arguments are applied first, so the physicalnetwork
    reference set afterwards always wins over a same-named kwarg.
    """
    network = LogicalNetwork.create_instance(id)
    network_map = LogicalNetworkMap.create_instance(id)
    for attr_name, attr_value in args.items():
        setattr(network, attr_name, attr_value)
    network_map.network = network.create_reference()
    network.physicalnetwork = ReferenceObject(PhysicalNetwork.default_key(physicalnetwork))
    return (network, network_map)
def removeproxyarp(self, connection, arpentries):
    '''
    Remove specified ARP entries.

    Entries that are not currently registered for *connection* are
    silently ignored.
    '''
    arp_list = self._extra_arps[connection]
    for entry in arpentries:
        try:
            arp_list.remove(entry)
        except ValueError:
            # BUGFIX: list.remove raises ValueError, not KeyError; the old
            # `except KeyError` let a missing entry crash the whole call
            # instead of being a best-effort no-op.
            pass
    if connection in self._flowupdaters:
        # Notify the per-connection flow updater about the affected networks
        self._flowupdaters[connection].updateobjects(
            [ReferenceObject(LogicalNetwork.default_key(nid))
             for _, _, nid, _ in arpentries])
def updater_with_key(keys, values, timestamp):
    """Run the wrapped ``updater`` and keep its index ("extra") keys in sync.

    This is a closure: ``updater``, ``withtime``, ``orig_len``,
    ``auto_remove_keys``, ``extra_keys``, ``extra_key_set`` and ``self``
    are captured from the enclosing scope.

    Layout of ``keys``/``values`` as sliced by this function:
      - ``[:orig_len]``: the keys/values passed to the real updater;
      - ``[orig_len:keystart]``: presumably the previously requested
        auto-remove keys (keystart = orig_len + len(auto_remove_keys));
      - next ``len(extra_keys)`` entries: unique-/multi-key index objects;
      - next ``len(extra_key_set)`` entries: the key-set objects for them.

    Whenever an index key that must be modified was not retrieved, the
    required keys are stored back into ``extra_keys`` / ``extra_key_set`` /
    ``auto_remove_keys`` and ``_NeedMoreKeysException`` is re-raised —
    presumably so the caller retries with the extra keys included
    (TODO confirm against the caller's retry loop).

    Raises:
        _NeedMoreKeysException: more keys must be retrieved before the
            update can be applied.
        AlreadyExistsException: a unique key is already owned by a
            different object.
    """
    # Automatically manage extra keys
    remove_uniquekeys = []
    remove_multikeys = []
    update_uniquekeys = []
    update_multikeys = []
    keystart = orig_len + len(auto_remove_keys)
    # Collect the index entries referenced by the *old* values: these may
    # have to be removed if the owning object is updated or deleted.
    for v in values[:keystart]:
        if v is not None:
            if hasattr(v, 'kvdb_uniquekeys'):
                remove_uniquekeys.extend((k, v.create_weakreference())
                                         for k in v.kvdb_uniquekeys())
            if hasattr(v, 'kvdb_multikeys'):
                remove_multikeys.extend((k, v.create_weakreference())
                                        for k in v.kvdb_multikeys())
    if self.debuggingupdater:
        # Updater may be called more than once, ensure that this updater does not crash
        # on multiple calls: run it first on cloned/deep-copied inputs.
        kc = keys[:orig_len]
        vc = [v.clone_instance()
              if v is not None and hasattr(v, 'clone_instance')
              else deepcopy(v)
              for v in values[:orig_len]]
        if withtime:
            updated_keys, updated_values = updater(kc, vc, timestamp)
        else:
            updated_keys, updated_values = updater(kc, vc)
    # The real updater call; its result replaces the debug dry-run above.
    if withtime:
        updated_keys, updated_values = updater(keys[:orig_len], values[:orig_len], timestamp)
    else:
        updated_keys, updated_values = updater(keys[:orig_len], values[:orig_len])
    # Collect the index entries referenced by the *new* values.
    for v in updated_values:
        if v is not None:
            if hasattr(v, 'kvdb_uniquekeys'):
                update_uniquekeys.extend((k, v.create_weakreference())
                                         for k in v.kvdb_uniquekeys())
            if hasattr(v, 'kvdb_multikeys'):
                update_multikeys.extend((k, v.create_weakreference())
                                        for k in v.kvdb_multikeys())
    # Current values of the retrieved index keys and index key-set keys.
    extrakeysdict = dict(
        zip(keys[keystart:keystart + len(extra_keys)],
            values[keystart:keystart + len(extra_keys)]))
    extrakeysetdict = dict(
        zip(
            keys[keystart + len(extra_keys):keystart + len(extra_keys) + len(extra_key_set)],
            values[keystart + len(extra_keys):keystart + len(extra_keys) + len(extra_key_set)]))
    tempdict = {}  # index entries cleared in this pass; may be restored later
    old_values = dict(zip(keys, values))
    updated_keyset = set(updated_keys)
    try:
        append_remove = set()    # auto-remove keys not retrieved yet
        autoremove_keys = set()  # transitive closure of keys to auto-remove
        # Use DFS to find auto remove keys
        def dfs(k):
            if k in autoremove_keys:
                return
            autoremove_keys.add(k)
            if k not in old_values:
                # Not retrieved in this round — must ask for it and retry
                append_remove.add(k)
            else:
                oldv = old_values[k]
                if oldv is not None and hasattr(oldv, 'kvdb_autoremove'):
                    for k2 in oldv.kvdb_autoremove():
                        dfs(k2)
        for k, v in zip(updated_keys, updated_values):
            if v is None:
                # Deleted object: cascade deletion to its auto-remove keys
                dfs(k)
        if append_remove:
            raise _NeedMoreKeysException()
        for k, v in remove_uniquekeys:
            if v.getkey() not in updated_keyset and v.getkey() not in auto_remove_keys:
                # This key is not updated, keep the indices untouched
                continue
            if k not in extrakeysdict:
                raise _NeedMoreKeysException()
            elif extrakeysdict[k] is not None and extrakeysdict[k].ref.getkey() == v.getkey():
                # If the unique key does not reference to the correct object
                # there may be an error, but we ignore this.
                # Save in a temporary dictionary. We may restore it later.
                tempdict[k] = extrakeysdict[k]
                extrakeysdict[k] = None
                setkey = UniqueKeyReference.get_keyset_from_key(k)
                if setkey not in extrakeysetdict:
                    raise _NeedMoreKeysException()
                else:
                    ks = extrakeysetdict[setkey]
                    if ks is None:
                        ks = UniqueKeySet.create_from_key(setkey)
                        extrakeysetdict[setkey] = ks
                    ks.set.dataset().discard(WeakReferenceObject(k))
        for k, v in remove_multikeys:
            if v.getkey() not in updated_keyset and v.getkey() not in auto_remove_keys:
                # This key is not updated, keep the indices untouched
                continue
            if k not in extrakeysdict:
                raise _NeedMoreKeysException()
            else:
                mk = extrakeysdict[k]
                if mk is not None:
                    mk.set.dataset().discard(v)
                    if not mk.set.dataset():
                        # The multi-key set became empty: drop the whole
                        # entry and unregister it from its key set.
                        tempdict[k] = extrakeysdict[k]
                        extrakeysdict[k] = None
                        setkey = MultiKeyReference.get_keyset_from_key(k)
                        if setkey not in extrakeysetdict:
                            raise _NeedMoreKeysException()
                        else:
                            ks = extrakeysetdict[setkey]
                            if ks is None:
                                ks = MultiKeySet.create_from_key(setkey)
                                extrakeysetdict[setkey] = ks
                            ks.set.dataset().discard(WeakReferenceObject(k))
        for k, v in update_uniquekeys:
            if k not in extrakeysdict:
                raise _NeedMoreKeysException()
            elif extrakeysdict[k] is not None and extrakeysdict[k].ref.getkey() != v.getkey():
                # A different object already owns this unique key
                raise AlreadyExistsException('Unique key conflict for %r and %r, with key %r' % \
                        (extrakeysdict[k].ref.getkey(), v.getkey(), k))
            elif extrakeysdict[k] is None:
                lv = tempdict.get(k, None)
                if lv is not None and lv.ref.getkey() == v.getkey():
                    # Restore this value
                    nv = lv
                else:
                    nv = UniqueKeyReference.create_from_key(k)
                    nv.ref = ReferenceObject(v.getkey())
                extrakeysdict[k] = nv
                setkey = UniqueKeyReference.get_keyset_from_key(k)
                if setkey not in extrakeysetdict:
                    raise _NeedMoreKeysException()
                else:
                    ks = extrakeysetdict[setkey]
                    if ks is None:
                        ks = UniqueKeySet.create_from_key(setkey)
                        extrakeysetdict[setkey] = ks
                    ks.set.dataset().add(nv.create_weakreference())
        for k, v in update_multikeys:
            if k not in extrakeysdict:
                raise _NeedMoreKeysException()
            else:
                mk = extrakeysdict[k]
                if mk is None:
                    # Restore the entry dropped above, or create a fresh one,
                    # and (re-)register it in its key set.
                    mk = tempdict.get(k, None)
                    if mk is None:
                        mk = MultiKeyReference.create_from_key(k)
                        mk.set = DataObjectSet()
                    setkey = MultiKeyReference.get_keyset_from_key(k)
                    if setkey not in extrakeysetdict:
                        raise _NeedMoreKeysException()
                    else:
                        ks = extrakeysetdict[setkey]
                        if ks is None:
                            ks = MultiKeySet.create_from_key(setkey)
                            extrakeysetdict[setkey] = ks
                        ks.set.dataset().add(mk.create_weakreference())
                mk.set.dataset().add(v)
                extrakeysdict[k] = mk
    except _NeedMoreKeysException:
        # Prepare the keys that must also be retrieved, then re-raise so
        # the caller can run again with them included.
        extra_keys[:] = list(
            set(
                itertools.chain((k for k, v in remove_uniquekeys
                                 if v.getkey() in updated_keyset or v.getkey() in autoremove_keys),
                                (k for k, v in remove_multikeys
                                 if v.getkey() in updated_keyset or v.getkey() in autoremove_keys),
                                (k for k, _ in update_uniquekeys),
                                (k for k, _ in update_multikeys))))
        extra_key_set[:] = list(
            set(
                itertools.chain(
                    (UniqueKeyReference.get_keyset_from_key(k)
                     for k, v in remove_uniquekeys
                     if v.getkey() in updated_keyset or v.getkey() in autoremove_keys),
                    (MultiKeyReference.get_keyset_from_key(k)
                     for k, v in remove_multikeys
                     if v.getkey() in updated_keyset or v.getkey() in autoremove_keys),
                    (UniqueKeyReference.get_keyset_from_key(k)
                     for k, _ in update_uniquekeys),
                    (MultiKeyReference.get_keyset_from_key(k)
                     for k, _ in update_multikeys))))
        auto_remove_keys.clear()
        auto_remove_keys.update(
            autoremove_keys.difference(keys[:orig_len]).difference(
                extra_keys).difference(extra_key_set))
        raise
    else:
        extrakeys_list = list(extrakeysdict.items())
        extrakeyset_list = list(extrakeysetdict.items())
        # Keys auto-removed but not otherwise returned get an explicit None
        autoremove_list = list(
            autoremove_keys.difference(updated_keys).difference(
                extrakeysdict.keys()).difference(
                    extrakeysetdict.keys()))
        return (tuple(
            itertools.chain(updated_keys, (k for k, _ in extrakeys_list),
                            (k for k, _ in extrakeyset_list),
                            autoremove_list)),
                tuple(
                    itertools.chain(updated_values,
                                    (v for _, v in extrakeys_list),
                                    (v for _, v in extrakeyset_list),
                                    [None] * len(autoremove_list))))