def session():
    auth_kwargs = dict(auth_url=get("auth_url"))
    token = get("token")
    if token:
        auth_klass = v3.Token
        auth_kwargs.update(token=token)
    else:
        auth_klass = v3.Password
        auth_kwargs.update(
            username=get("username"),
            user_domain_name=get("user_domain_name"),
            password=get("password"),
        )
    project_id = get("project_id")
    if project_id:
        auth_kwargs.update(project_id=project_id)
    else:
        auth_kwargs.update(
            project_name=get("project_name"),
            project_domain_name=get("project_domain_name"),
        )
    auth = auth_klass(**auth_kwargs)
    sess = Session(auth=auth)
    return Adapter(session=sess,
                   interface=get("interface"),
                   region_name=get("region_name"))
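# Hedged sketch of what the factory above assumes (not in the original snippet):
# v3, Session, and Adapter come from keystoneauth1, and get() is a placeholder
# configuration lookup. Here it reads OS_*-style environment variables; swap in
# whatever settings store the real code uses.
import os

from keystoneauth1.adapter import Adapter
from keystoneauth1.identity import v3
from keystoneauth1.session import Session


def get(name):
    # Hypothetical helper: e.g. get("auth_url") returns the value of OS_AUTH_URL,
    # or None if the variable is unset.
    return os.environ.get("OS_" + name.upper())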
def _gen_provider_client(self):
    m = self._module
    p = {
        "auth_url": m.params['identity_endpoint'],
        "password": m.params['password'],
        "username": m.params['user'],
        "project_name": m.params['project'],
        "user_domain_name": m.params['domain'],
        "reauthenticate": True
    }
    self._project_client = Adapter(
        session.Session(auth=v3.Password(**p)),
        raise_exc=False)

    p.pop("project_name")
    self._domain_client = Adapter(
        session.Session(auth=v3.Password(**p)),
        raise_exc=False)
def _gen_provider_client(self):
    logger = self._init_log()
    m = self._module
    p = {
        "auth_url": m.params['identity_endpoint'],
        "password": m.params['password'],
        "username": m.params['user_name'],
        "project_name": m.params['project_name'],
        "user_domain_name": m.params['domain_name'],
        "reauthenticate": True
    }
    self._project_client = Adapter(
        session.Session(auth=v3.Password(**p)),
        logger=logger)

    p.pop("project_name")
    self._domain_client = Adapter(
        session.Session(auth=v3.Password(**p)),
        logger=logger)
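# Hedged sketch (not from the source): the two functions above differ only in
# the Ansible parameter names they read. Both build one Adapter whose auth is
# scoped to a project and one from the same credentials minus project_name,
# which keystoneauth then authenticates without project scope (the original
# code calls it the domain client). The standalone helper below shows the same
# pattern with placeholder arguments instead of module params.
from keystoneauth1 import session
from keystoneauth1.adapter import Adapter
from keystoneauth1.identity import v3


def build_clients(auth_url, username, password, project_name, domain_name):
    creds = dict(auth_url=auth_url, username=username, password=password,
                 user_domain_name=domain_name, reauthenticate=True)
    project_client = Adapter(
        session.Session(auth=v3.Password(project_name=project_name, **creds)),
        raise_exc=False)
    domain_client = Adapter(
        session.Session(auth=v3.Password(**creds)),
        raise_exc=False)
    return project_client, domain_client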
def parallel_subnetwork_request(item, value):
    global parameter_local_cidr
    net_adap_remote = Adapter(
        auth=auth,
        session=sess,
        service_type='network',
        interface='public',
        region_name=item)
    try:
        subnetworks_temp = net_adap_remote.get('/v2.0/subnets/').json()
    except Exception:
        app_log.info("Exception when contacting the network adapter")
        return
    for subnetwork in subnetworks_temp['subnets']:
        if item == local_region_name:
            parameter_local_cidr_temp.append(subnetwork['cidr'])
        if value == subnetwork['network_id']:
            CIDRs.append(ipaddress.ip_network(subnetwork['cidr']))
            break
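# Hedged driver sketch (assumed, not in the source): the function above reads
# module-level globals (auth, sess, local_region_name, CIDRs, app_log and the
# parameter_local_cidr_temp list), so a caller typically fans it out over the
# regions with a thread pool, mirroring the executor pattern used in the later
# examples. collect_cidrs and its arguments are illustrative names.
import concurrent.futures


def collect_cidrs(region_names, network_id):
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=len(region_names) or 1) as executor:
        for region in region_names:
            executor.submit(parallel_subnetwork_request, region, network_id)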
auth_ref = auth.auth_ref
print("Auth Token: %s" % auth_ref.auth_token)

import ipdb
ipdb.set_trace()

# Service catalog
print("Has service catalog: %s" % auth_ref.has_service_catalog())
print("Service catalog: %s" % auth_ref.service_catalog.catalog)
print(sess.get_endpoint(service_type='identity'))
# import ipdb; ipdb.set_trace()

ks_adap = Adapter(
    auth=auth,
    session=sess,
    service_type='identity',
    interface='admin',
    region_name=OS['name'])
services = ks_adap.get('/v3/services')
print(services.json())

# Discover net
net_adap = Adapter(
    auth=auth,
    session=sess,
    service_type='network',
    interface=['internal', 'public'],
    region_name=OS['name'])
net_discovery = net_adap.get('/')
print(net_discovery.json())
class HwcSession(object):
    """Handles all authentication and HTTP sessions for HWC API calls."""

    def __init__(self, module, product):
        self.module = module
        self.product = product
        self._validate()
        self._session = self._credentials()
        self._adapter = Adapter(self._session)
        self._endpoints = {}
        self._project_id = ""

    def get(self, url, body=None):
        try:
            return self._adapter.get(url, json=body, headers=self._headers(),
                                     raise_exc=False)
        except getattr(requests.exceptions, 'RequestException') as inst:
            self.module.fail_json(msg=str(inst))

    def post(self, url, body=None):
        try:
            return self._adapter.post(url, json=body, headers=self._headers(),
                                      raise_exc=False)
        except getattr(requests.exceptions, 'RequestException') as inst:
            self.module.fail_json(msg=str(inst))

    def delete(self, url, body=None):
        try:
            return self._adapter.delete(url, json=body, headers=self._headers(),
                                        raise_exc=False)
        except getattr(requests.exceptions, 'RequestException') as inst:
            self.module.fail_json(msg=str(inst))

    def put(self, url, body=None):
        try:
            return self._adapter.put(url, json=body, headers=self._headers(),
                                     raise_exc=False)
        except getattr(requests.exceptions, 'RequestException') as inst:
            self.module.fail_json(msg=str(inst))

    def get_service_endpoint(self, service_type):
        if self._endpoints.get(service_type):
            return self._endpoints.get(service_type)

        e = None
        try:
            e = self._session.get_endpoint_data(
                service_type=service_type,
                region_name=self.module.params['region'])
        except getattr(requests.exceptions, 'RequestException') as inst:
            self.module.fail_json(msg=str(inst))

        if not e or e.url == "":
            self.module.fail_json(
                msg="Cannot find the endpoint for %s" % service_type)

        url = e.url
        if url[-1] != "/":
            url += "/"

        self._endpoints[service_type] = url
        return url

    def get_project_id(self):
        if self._project_id:
            return self._project_id

        try:
            pid = self._session.get_project_id()
            self._project_id = pid
            return pid
        except getattr(requests.exceptions, 'RequestException') as inst:
            self.module.fail_json(msg=str(inst))

    def _validate(self):
        if not HAS_REQUESTS:
            self.module.fail_json(msg=missing_required_lib('requests'),
                                  exception=REQUESTS_IMP_ERR)

        if not HAS_THIRD_LIBRARIES:
            self.module.fail_json(msg=missing_required_lib('keystoneauth1'),
                                  exception=THIRD_LIBRARIES_IMP_ERR)

    def _credentials(self):
        auth = v3.Password(auth_url=self.module.params['identity_endpoint'],
                           password=self.module.params['password'],
                           username=self.module.params['user'],
                           user_domain_name=self.module.params['domain'],
                           project_name=self.module.params['project'],
                           reauthenticate=True)
        return session.Session(auth=auth)

    def _headers(self):
        return {
            'User-Agent': "Huawei-Ansible-MM-%s" % self.product,
            'Accept': 'application/json',
        }
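# Hedged usage sketch (not part of the module): HwcSession only needs an object
# exposing .params and .fail_json, so a minimal stand-in is enough to exercise
# it outside Ansible. The parameter names mirror _credentials()/_validate();
# the endpoint, the "vpc" service type and the URL shape are illustrative
# assumptions, not something the module itself defines.
class _FakeModule(object):
    def __init__(self, params):
        self.params = params

    def fail_json(self, **kwargs):
        raise SystemExit(kwargs)


def example_hwc_usage():
    module = _FakeModule({
        'identity_endpoint': 'https://iam.example.com/v3',  # placeholder
        'user': 'demo', 'password': '******',
        'domain': 'Default', 'project': 'demo', 'region': 'region-1',
    })
    hwc = HwcSession(module, product="Demo")  # product only feeds the User-Agent header
    endpoint = hwc.get_service_endpoint("vpc")  # cached after the first lookup
    # Illustrative request path; adjust to the API actually being called.
    return hwc.get(endpoint + "v1/%s/vpcs" % hwc.get_project_id())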
def main(argv):
    opts, args = getopt.getopt(argv, "s:", ["size="])

    FIRST_REGION_NAME = "RegionOne"
    KEYSTONE_ENDPOINT = "http://{{ ansible_eno1.ipv4.address }}/identity/v3"
    # KEYSTONE_ENDPOINT = "http://192.168.57.6/identity/v3"

    def get_session_object(auth_param):
        return session.Session(auth=auth_param)

    def get_auth_object(keystone_endpoint):
        return v3.Password(
            username="******",
            password="******",
            project_name="demo",
            auth_url=keystone_endpoint,
            user_domain_id="default",
            project_domain_id="default",
            include_catalog=True,
            # Allow fetching a new token if the current one is going to expire
            reauthenticate=True)

    auth = get_auth_object(KEYSTONE_ENDPOINT)
    sess = get_session_object(auth)

    # Authenticate
    auth.get_access(sess)
    auth_ref = auth.auth_ref
    # print("Auth token: %s" % auth_ref.auth_token)

    catalog_endpoints = auth_ref.service_catalog.catalog
    # print("Resource catalog: %s" % catalog_endpoints)

    regions_list_neu = []
    regions_list_key = []
    regions_list = []

    for obj in catalog_endpoints:
        if obj['name'] == 'neutron':
            for endpoint in obj['endpoints']:
                # print(endpoint)
                new_endpoint_obj = {
                    'region_name': endpoint["region"],
                    'neutron_url': endpoint["url"]
                }
                regions_list_neu.append(new_endpoint_obj)
        if obj['name'] == 'keystone':
            for endpoint in obj['endpoints']:
                if endpoint['interface'] == 'public':
                    new_endpoint_obj = {
                        'region_name': endpoint["region"],
                        'keystone_url': endpoint["url"]
                    }
                    regions_list_key.append(new_endpoint_obj)
                    # print(endpoint)

    print(regions_list_neu)
    print(regions_list_key)

    for i in range(len(regions_list_neu)):
        neutron_endpoint = regions_list_neu[i]
        print(neutron_endpoint)
        for j in range(len(regions_list_key)):
            keystone_endpoint = regions_list_key[j]
            print(keystone_endpoint)
            if neutron_endpoint['region_name'] == keystone_endpoint['region_name']:
                new_end = {
                    'region_name': neutron_endpoint['region_name'],
                    'keystone_url': keystone_endpoint['keystone_url'],
                    'neutron_url': neutron_endpoint['neutron_url']
                }
                regions_list.append(new_end)

    print(regions_list)

    # One empty bucket per test CIDR, 10.0.0.0/24 through 10.0.48.0/24
    cidrs_region_network_information = {
        '10.0.%d.0/24' % i: [] for i in range(49)
    }
    # cidrs_region_network_information = {'10.0.0.0/24': [], '20.0.0.0/24': []}

    # For every region find the networks created with heat
    for i in range(len(regions_list)):
        region_name = regions_list[i]['region_name']
        region_auth_endpoint = regions_list[i]['keystone_url'] + '/v3'
        region_neutron_endpoint = regions_list[i]['neutron_url']

        auth = get_auth_object(region_auth_endpoint)
        sess = get_session_object(auth)
        print('Getting information from region ' + str(region_name))

        # Authenticate
        auth.get_access(sess)
        auth_ref = auth.auth_ref

        net_adap = Adapter(
            auth=auth,
            session=sess,
            service_type='network',
            interface='public',
            region_name=region_name)

        per_region_net_list = net_adap.get('/v2.0/networks').json()
        region_network = per_region_net_list['networks']

        # For every network find the cidr of the subnetwork it has
        for index in range(len(region_network)):
            net_ID = region_network[index]['id']
            subnet_ID = region_network[index]['subnets'][0]
            per_net_subnet = net_adap.get('/v2.0/subnets/' + subnet_ID).json()
            subnet_cidr = per_net_subnet['subnet']['cidr']
            # print(subnet_cidr)
            test_object = {
                'region_name': region_name,
                'net_uuid': net_ID,
            }
            cidrs_region_network_information[subnet_cidr].append(test_object)

    # print(cidrs_region_network_information)

    test_type1 = "L3"
    test_type2 = "L2"
    print('starting tests')

    test_sizes_temps = opts[0][1]
    test_sizes = test_sizes_temps.split(',')
    test_number = 100
    configuration = Configuration()

    for elem in test_sizes:
        test_size = int(elem)

        if test_type1 == "L3":
            file_results = open(
                "results/Results_" + test_type1 + "_" + str(test_size) + "_" +
                str(datetime.datetime.now().strftime("%H:%M:%S")), "w+")
            # file_results.write("L3\n")
            # file_results.write(str(test_size)+"\n")
            # file_results.write(str(test_number)+"\n")

            for i in range(test_number):
                seed(datetime.datetime.now())
                selected_index = randint(1, len(regions_list))
                host = regions_list[selected_index - 1]
                # print(host['region_name'])
                configuration.host = host['neutron_url'][0:-5] + "7575/api"
                api_instance = swagger_client.ResourcesApi(
                    swagger_client.ApiClient(configuration))
                resource = swagger_client.Resource()  # Resource | data for inter-site creation
                resource.type = "L3"
                resource.name = "Inter-site network test " + str(i)

                condition = True
                keys = []
                regions = []
                subresources = []

                while condition:
                    seed(datetime.datetime.now())
                    key = random.choice(list(cidrs_region_network_information))
                    condition1 = True
                    while condition1:
                        seed(datetime.datetime.now())
                        second_element = random.randint(
                            1, len(cidrs_region_network_information[key]))
                        element = cidrs_region_network_information[key][second_element - 1]
                        if element['region_name'] == host['region_name']:
                            # print(key)
                            # print(element)
                            keys.append(key)
                            regions.append(element['region_name'])
                            subresources.append(element['region_name'] + "," +
                                                element['net_uuid'])
                            condition = False
                            condition1 = False
                            break

                for j in range(test_size - 1):
                    # print(j)
                    condition = True
                    condition1 = True
                    while condition and condition1:
                        seed(datetime.datetime.now())
                        key = random.choice(list(cidrs_region_network_information))
                        seed(datetime.datetime.now())
                        second_element = random.randint(
                            1, len(cidrs_region_network_information[key]))
                        element = cidrs_region_network_information[key][second_element - 1]
                        if element['region_name'] not in regions and key not in keys:
                            # print(key)
                            # print(element)
                            keys.append(key)
                            regions.append(element['region_name'])
                            subresources.append(element['region_name'] + "," +
                                                element['net_uuid'])
                            condition = False
                            condition1 = False
                            break

                print(i)
                print(subresources)
                print(regions)
                print(keys)
                resource.subresources = subresources

                api_response = ''
                # start = time.clock()
                start = time.time()
                try:
                    # Horizontal request to create an inter-site Resource POST
                    api_response = api_instance.vertical_create_resource(resource)
                    # print(api_response['resource_global'])
                except ApiException as e:
                    print("Exception when calling VerticalApi->vertical_create_resource: %s\n" % e)
                # end = time.clock()
                end = time.time()

                print(api_response["resource_global"])
                print(start)
                print(end)
                print(end - start)
                file_results.write(str(end - start) + "\n")

                try:
                    delete_resource = api_instance.vertical_delete_resource(
                        api_response['resource_global'])
                except ApiException as e:
                    print("Exception when calling VerticalApi->vertical_delete_resource: %s\n" % e)

            file_results.close()

        if test_type2 == "L2":
            file_results = open(
                "results/Results_" + test_type2 + "_" + str(test_size) + "_" +
                str(datetime.datetime.now().strftime("%H:%M:%S")), "w+")

            for i in range(test_number):
                seed(datetime.datetime.now())
                selected_index = randint(1, len(regions_list))
                host = regions_list[selected_index - 1]
                # print(host['region_name'])
                configuration.host = host['neutron_url'][0:-5] + "7575/api"
                api_instance = swagger_client.ResourcesApi(
                    swagger_client.ApiClient(configuration))
                resource = swagger_client.Resource()  # Resource | data for inter-site creation
                resource.type = "L2"
                resource.name = "Inter-site network test " + str(i)

                condition = True
                regions = []
                subresources = []

                while condition:
                    seed(datetime.datetime.now())
                    key = random.choice(list(cidrs_region_network_information))
                    condition1 = True
                    while condition1:
                        second_element = random.randint(
                            1, len(cidrs_region_network_information[key]))
                        element = cidrs_region_network_information[key][second_element - 1]
                        if element['region_name'] == host['region_name']:
                            regions.append(element['region_name'])
                            subresources.append(element['region_name'] + "," +
                                                element['net_uuid'])
                            condition = False
                            condition1 = False
                            break

                for j in range(test_size - 1):
                    # print(j)
                    condition = True
                    while condition:
                        seed(datetime.datetime.now())
                        new_index = randint(1, len(regions_list))
                        new_host = regions_list[new_index - 1]
                        # print(host['region_name'])
                        if new_host['region_name'] not in regions:
                            regions.append(new_host['region_name'])
                            subresources.append(new_host['region_name'] + ",")
                            condition = False
                            break

                print(i)
                print(subresources)
                print(regions)
                resource.subresources = subresources

                api_response = ""
                start = time.time()
                try:
                    # Horizontal request to create an inter-site Resource POST
                    api_response = api_instance.vertical_create_resource(resource)
                    # print(api_response['resource_global'])
                except ApiException as e:
                    print("Exception when calling VerticalApi->vertical_create_resource: %s\n" % e)
                end = time.time()

                print(api_response["resource_global"])
                print(end - start)
                file_results.write(str(end - start) + "\n")

                try:
                    delete_resource = api_instance.vertical_delete_resource(
                        api_response['resource_global'])
                except ApiException as e:
                    print("Exception when calling VerticalApi->vertical_delete_resource: %s\n" % e)

            file_results.close()
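# Hedged entry point (not shown in the snippet): main() parses only the option
# list, so it is normally driven with sys.argv[1:]. The script name and the
# "-s 2,4,8" test sizes below are illustrative.
import sys

if __name__ == '__main__':
    # e.g. python l3_l2_benchmark.py -s 2,4,8
    main(sys.argv[1:])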
class ZunClient(object):
    RESTART_TIMEOUT = 15  # seconds

    def __init__(self, session, container_uuid=None):
        self._uuid = container_uuid
        self._session = Adapter(
            session=session,
            service_type="container",
            interface="public",
            # 1.25 - support for encode/decode archive file
            default_microversion="1.25",
        )

    def get_container(self):
        res = self._session.get(f"/containers/{self._uuid}")
        return res.json()

    def get_client_connection_info(self):
        res = self._session.post(
            f"/containers/{self._uuid}/execute?command=env&run=true")
        env_output: str = res.json()["output"]
        env_lines = env_output.split("\n")
        runtime_dir = None
        for line in env_lines:
            if line.startswith("JUPYTER_RUNTIME_DIR"):
                runtime_dir = line.split("=")[1]
                break
        if not runtime_dir:
            # TODO: we can probably be more graceful here and try some sane
            # default locations or try to figure out the connection file location
            # somehow else.
            raise RuntimeError(
                "Cannot connect: container does not set JUPYTER_RUNTIME_DIR in environment"
            )
        with self.download_path(runtime_dir) as tar:
            # Sort by mtime to get the latest connection file written by the
            # container process.
            conn_files = sorted(
                [
                    tarinfo for tarinfo in tar.getmembers()
                    if tarinfo.isfile()
                    and os.path.basename(tarinfo.name).startswith("kernel-")
                ],
                key=operator.attrgetter("mtime"),
                reverse=True,
            )
            if not conn_files:
                raise RuntimeError(
                    "Cannot connect: no kernel connection file found in running container"
                )
            conn_file = tar.extractfile(conn_files[0])
            return json.load(conn_file)

    def is_container_running(self):
        container = self.get_container()
        return container["status"] == "Running"

    async def restart_container(self, timeout: "int" = None):
        if timeout is None:
            timeout = self.RESTART_TIMEOUT

        self._session.post(f"/containers/{self._uuid}/reboot")

        async def _until_running():
            while not self.is_container_running():
                await asyncio.sleep(1.0)

        await asyncio.wait_for(_until_running(), float(timeout))

    def kill_container(self, signum=signal.SIGKILL):
        return self._session.post(
            f"/containers/{self._uuid}/kill?signal={int(signum)}")

    def download_path(self, source_path: "str") -> "TarFile":
        res = self._session.get(
            f"/containers/{self._uuid}/get_archive?path={source_path}&encode_data=True"
        )
        tar_data: str = res.json()["data"]
        fd = io.BytesIO(base64.decodebytes(tar_data.encode("utf-8")))
        return tarfile.open(fileobj=fd, mode="r")

    def upload_path(self, dest_path: "str", contents: "io.BytesIO"):
        self._session.post(
            f"/containers/{self._uuid}/put_archive?path={dest_path}&decode_data=True",
            json={"data": base64.encodebytes(contents.read()).decode("utf-8")},
        )
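# Hedged usage sketch (assumed, not from the source): any authenticated
# keystoneauth1 session works here; the auth URL and credentials below are
# placeholders. ZunClient then talks to the Zun "container" endpoint through
# the Adapter configured in __init__ above.
from keystoneauth1 import session
from keystoneauth1.identity import v3


def example_zun_usage(container_uuid):
    auth = v3.Password(auth_url="https://keystone.example.org/v3",  # placeholder
                       username="demo", password="******",
                       project_name="demo",
                       user_domain_name="Default",
                       project_domain_name="Default")
    zun = ZunClient(session.Session(auth=auth), container_uuid=container_uuid)
    if zun.is_container_running():
        # Pull the kernel connection file out of the running container.
        return zun.get_client_connection_info()
    return None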
class ZunHydraKernelProvisioner(FileManagementMixin, HydraKernelProvisioner):
    container_uuid = Unicode()
    auth_provider = EntryPointType("hydra_kernel.zun_provisioner.auth_provider")
    _auth_provider_factory = None

    poll_interval = 5.0

    @property
    def auth_provider_factory(self):
        if not self._auth_provider_factory:
            for key, entry_point in self.auth_provider.load_entry_points().items():
                try:
                    self._auth_provider_factory = entry_point.load()
                except Exception as e:
                    self.log.debug(
                        "Failed to load %s entrypoint %r: %r",
                        self.auth_provider.entry_point_group,
                        key,
                        e,
                    )
        return self._auth_provider_factory

    @property
    def has_process(self) -> bool:
        return self.zun is not None

    async def poll(self) -> "Optional[int]":
        if not self.zun.is_container_running():
            return -1

    async def send_signal(self, signum: "int") -> None:
        self.zun.kill_container(signum)

    def _get_session(self):
        if callable(self.auth_provider_factory):
            return self.auth_provider_factory()
        else:
            return default_auth_provider()

    async def pre_launch(self, **kwargs: "Any") -> "Dict[str, Any]":
        kwargs = await super().pre_launch(**kwargs)

        session = self._get_session()

        self.neutron = Adapter(
            session=session,
            service_type="network",
            interface="public",
        )

        if not self.container_uuid:
            raise ValueError("Missing container UUID")

        self.zun = ZunClient(session, container_uuid=self.container_uuid)

        # Place some value in "cmd" even though we're not launching a kernel;
        # other code nevertheless assumes this will have some value
        kwargs["cmd"] = [
            "echo",
            "The Zun provisioner does not support the kernel 'cmd' argument.",
        ]

        # TODO: possibly attempt to start the container if it is stopped/created

        return kwargs

    async def launch_kernel(self, command: "List[str]",
                            **kwargs: "Any") -> "KernelConnectionInfo":
        if not self.zun.is_container_running():
            self.binding.update_progress("Restarting container")
            await self.zun.restart_container()

        self.binding.update_progress("Checking container")
        container = self.zun.get_container()
        container_ports = [
            addr["port"]
            for network_id, addrs in container["addresses"].items()
            for addr in addrs
        ]
        active_fips = self.neutron.get(
            "/v2.0/floatingips?status=ACTIVE").json()["floatingips"]
        container_fip = next(
            iter([
                fip for fip in active_fips
                if fip["port_id"] in container_ports
            ]),
            None,
        )
        if not container_fip:
            raise RuntimeError(
                f"Cannot connect: container {container['uuid']} has no Floating IP attached"
            )

        # Read kernel connection file
        conn_info = self.zun.get_client_connection_info()
        conn_info["ip"] = container_fip["floating_ip_address"]

        self.binding.update_progress("Ready")

        return conn_info

    def get_shutdown_wait_time(self, recommended: float = 5) -> float:
        # Allow containers to take upwards of 30 seconds to tear down
        return max(recommended, 30.0)

    async def upload_path(self, local_path: "str", remote_path: "str" = None):
        self.binding.update_progress("Preparing upload")
        fd = self.prepare_upload(local_path)
        self.binding.update_progress("Uploading")
        self.zun.upload_path(remote_path, fd)

    async def download_path(self, remote_path: "str", local_path: "str" = None):
        self.binding.update_progress("Downloading")
        with self.zun.download_path(remote_path) as tar:
            tar.extractall(local_path)
def main(argv):
    opts, args = getopt.getopt(argv, 'cd', ['create', 'delete'])
    print(opts)

    for opt, arg in opts:
        if opt in ('-c', '--create'):
            regions_to_use = [
                ["RegionOne", "http://192.168.57.6/identity/v3"],
                ["RegionTwo", "http://192.168.57.9/identity/v3"]]

            for r_element in regions_to_use:
                FIRST_REGION_NAME, KEYSTONE_ENDPOINT = r_element[0], r_element[1]

                def get_session_object(auth_param):
                    return session.Session(auth=auth_param)

                def get_auth_object(keystone_endpoint):
                    return v3.Password(
                        username="******",
                        password="******",
                        project_name="demo",
                        auth_url=keystone_endpoint,
                        user_domain_id="default",
                        project_domain_id="default",
                        include_catalog=True,
                        # Allow fetching a new token if the current one is going to expire
                        reauthenticate=True)

                auth = get_auth_object(KEYSTONE_ENDPOINT)
                sess = get_session_object(auth)

                # Authenticate
                auth.get_access(sess)
                auth_ref = auth.auth_ref
                # print("Auth token: %s" % auth_ref.auth_token)

                local_net_adap = Adapter(
                    auth=auth,
                    session=sess,
                    service_type='network',
                    interface='public',
                    region_name=FIRST_REGION_NAME)

                catalog_endpoints = auth_ref.service_catalog.catalog
                # print("Service catalog: %s" % catalog_endpoints)

                regions_list_neu = []
                regions_list_key = []
                regions_list = []

                for obj in catalog_endpoints:
                    if obj['name'] == 'neutron':
                        for endpoint in obj['endpoints']:
                            # print(endpoint)
                            new_endpoint_obj = {
                                'region_name': endpoint["region"],
                                'neutron_url': endpoint["url"]}
                            regions_list_neu.append(new_endpoint_obj)
                    if obj['name'] == 'keystone':
                        for endpoint in obj['endpoints']:
                            if endpoint['interface'] == 'public':
                                new_endpoint_obj = {
                                    'region_name': endpoint["region"],
                                    'keystone_url': endpoint["url"]}
                                regions_list_key.append(new_endpoint_obj)
                                # print(endpoint)

                print(regions_list_neu)
                print(regions_list_key)

                for i in range(len(regions_list_neu)):
                    neutron_endpoint = regions_list_neu[i]
                    print(neutron_endpoint)
                    for j in range(len(regions_list_key)):
                        keystone_endpoint = regions_list_key[j]
                        print(keystone_endpoint)
                        if neutron_endpoint['region_name'] == keystone_endpoint['region_name']:
                            new_end = {
                                'region_name': neutron_endpoint['region_name'],
                                'keystone_url': keystone_endpoint['keystone_url'],
                                'neutron_url': neutron_endpoint['neutron_url']}
                            regions_list.append(new_end)

                print(regions_list)

                # One empty bucket per test CIDR, 10.0.0.0/24 through 180.0.0.0/24
                cidrs_region_network_information = {
                    '%d.0.0.0/24' % (10 * i): [] for i in range(1, 19)
                }

                # For every region find the networks created manually
                for i in range(len(regions_list)):
                    region_name = regions_list[i]['region_name']
                    region_auth_endpoint = regions_list[i]['keystone_url'] + '/v3'
                    region_neutron_endpoint = regions_list[i]['neutron_url']

                    auth = get_auth_object(region_auth_endpoint)
                    sess = get_session_object(auth)
                    print('Getting information from region ' + str(region_name))

                    # Authenticate
                    auth.get_access(sess)
                    auth_ref = auth.auth_ref

                    net_adap = Adapter(
                        auth=auth,
                        session=sess,
                        service_type='network',
                        interface='public',
                        region_name=region_name)

                    per_region_net_list = net_adap.get('/v2.0/networks').json()
                    region_network = per_region_net_list['networks']

                    # For every network find the cidr of the subnetwork it has
                    for index in range(len(region_network)):
                        net_ID = region_network[index]['id']
                        subnet_ID = region_network[index]['subnets'][0]
                        per_net_subnet = net_adap.get('/v2.0/subnets/' + subnet_ID).json()
                        subnet_cidr = per_net_subnet['subnet']['cidr']
                        # print(subnet_cidr)
                        test_object = {
                            'region_name': region_name,
                            'net_uuid': net_ID,
                            'keystone': region_auth_endpoint
                        }
                        cidrs_region_network_information[subnet_cidr].append(test_object)

                print('starting tests')
                print(cidrs_region_network_information)

                def parallel_inters_creation_request(uuid, resources):
                    starting_time = time.time()
                    local_resource = ''
                    remote_region = ''
                    remote_uuid = ''
                    remote_keystone = ''
                    first_obj = resources[0]
                    second_obj = resources[1]
                    if first_obj['region_name'] == FIRST_REGION_NAME:
                        local_resource = first_obj['net_uuid']
                        remote_region = second_obj['region_name']
                        remote_uuid = second_obj['net_uuid']
                        remote_keystone = second_obj['keystone']
                    else:
                        local_resource = second_obj['net_uuid']
                        remote_region = first_obj['region_name']
                        remote_uuid = first_obj['net_uuid']
                        remote_keystone = first_obj['keystone']
                    print('Starting thread at time: ' + str(starting_time))
                    interconnection_data = {'interconnection': {
                        'name': 'test',
                        'remote_keystone': remote_keystone,
                        'remote_region': remote_region,
                        'local_resource_id': local_resource,
                        'type': 'network_l2',
                        'remote_resource_id': remote_uuid,
                    }}
                    print(interconnection_data)
                    try:
                        inter_temp = local_net_adap.post(
                            url='/v2.0/inter/interconnections/',
                            json=interconnection_data)
                    except ClientException as e:
                        print("Exception when contacting the network adapter: " + str(e))

                workers = len(cidrs_region_network_information)
                start_interconnection_time = time.time()
                with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
                    for uuid, resources in cidrs_region_network_information.items():
                        executor.submit(parallel_inters_creation_request, uuid, resources)
                end_interconnection_time = time.time()

            return 0

        elif opt in ('-d', '--delete'):
            regions_to_use = [
                ["RegionOne", "http://192.168.57.6/identity/v3"],
                ["RegionTwo", "http://192.168.57.9/identity/v3"]]

            for r_element in regions_to_use:
                FIRST_REGION_NAME, KEYSTONE_ENDPOINT = r_element[0], r_element[1]

                def get_session_object(auth_param):
                    return session.Session(auth=auth_param)

                def get_auth_object(keystone_endpoint):
                    return v3.Password(
                        username="******",
                        password="******",
                        project_name="demo",
                        auth_url=keystone_endpoint,
                        user_domain_id="default",
                        project_domain_id="default",
                        include_catalog=True,
                        # Allow fetching a new token if the current one is going to expire
                        reauthenticate=True)

                auth = get_auth_object(KEYSTONE_ENDPOINT)
                sess = get_session_object(auth)

                # Authenticate
                auth.get_access(sess)
                auth_ref = auth.auth_ref
                # print("Auth token: %s" % auth_ref.auth_token)

                local_net_adap = Adapter(
                    auth=auth,
                    session=sess,
                    service_type='network',
                    interface='public',
                    region_name=FIRST_REGION_NAME)

                catalog_endpoints = auth_ref.service_catalog.catalog
                # print("Service catalog: %s" % catalog_endpoints)

                regions_list_neu = []
                regions_list_key = []
                regions_list = []

                for obj in catalog_endpoints:
                    if obj['name'] == 'neutron':
                        for endpoint in obj['endpoints']:
                            # print(endpoint)
                            new_endpoint_obj = {
                                'region_name': endpoint["region"],
                                'neutron_url': endpoint["url"]}
                            regions_list_neu.append(new_endpoint_obj)
                    if obj['name'] == 'keystone':
                        for endpoint in obj['endpoints']:
                            if endpoint['interface'] == 'public':
                                new_endpoint_obj = {
                                    'region_name': endpoint["region"],
                                    'keystone_url': endpoint["url"]}
                                regions_list_key.append(new_endpoint_obj)
                                # print(endpoint)

                print(regions_list_neu)
                print(regions_list_key)

                for i in range(len(regions_list_neu)):
                    neutron_endpoint = regions_list_neu[i]
                    print(neutron_endpoint)
                    for j in range(len(regions_list_key)):
                        keystone_endpoint = regions_list_key[j]
                        print(keystone_endpoint)
                        if neutron_endpoint['region_name'] == keystone_endpoint['region_name']:
                            new_end = {
                                'region_name': neutron_endpoint['region_name'],
                                'keystone_url': keystone_endpoint['keystone_url'],
                                'neutron_url': neutron_endpoint['neutron_url']}
                            regions_list.append(new_end)

                print(regions_list)
                print('Starting regions requests')

                # For every region find the interconnections to be deleted
                for i in range(len(regions_list)):
                    region_name = regions_list[i]['region_name']
                    region_auth_endpoint = regions_list[i]['keystone_url'] + '/v3'
                    region_neutron_endpoint = regions_list[i]['neutron_url']

                    auth = get_auth_object(region_auth_endpoint)
                    sess = get_session_object(auth)
                    print('Getting information from region ' + str(region_name))

                    # Authenticate
                    auth.get_access(sess)
                    auth_ref = auth.auth_ref

                    net_adap = Adapter(
                        auth=auth,
                        session=sess,
                        service_type='network',
                        interface='public',
                        region_name=region_name)

                    per_region_inter_list = net_adap.get('/v2.0/inter/interconnections').json()
                    for interco in per_region_inter_list['interconnections']:
                        interco_id = interco['id']
                        interco_answer = net_adap.delete(
                            '/v2.0/inter/interconnections/' + str(interco_id))

            return 0
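# Hedged entry point (not shown in the snippet): the option loop above expects
# -c/--create or -d/--delete, so the script is normally driven with
# sys.argv[1:]; the script name below is illustrative.
import sys

if __name__ == '__main__':
    # e.g. python interconnections.py --create
    main(sys.argv[1:])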