def extract_interfaces(self, host):
    try:
        if self.interfaces:
            if 'device_role' in host:
                url = urljoin(
                    self.api_endpoint,
                    "/api/dcim/interfaces/?limit=0&device_id=%s" % (to_text(host["id"])))
            elif 'role' in host:
                url = urljoin(
                    self.api_endpoint,
                    "/api/virtualization/interfaces/?limit=0&virtual_machine_id=%s" % (to_text(host["id"])))
            interface_lookup = self.get_resource_list(api_url=url)

            # Collect all IP Addresses associated with the device
            device_ipaddresses = self.extract_ipaddresses(host)

            # Attach the found IP Addresses record to the interface
            for interface in interface_lookup:
                interface_ip = [
                    ipaddress for ipaddress in device_ipaddresses
                    if ipaddress["interface"]["id"] == interface["id"]
                ]
                interface["ip-addresses"] = interface_ip
            return interface_lookup
    except Exception:
        return
def parse(self, inventory, loader, path, cache=True):
    super(InventoryModule, self).parse(inventory, loader, path)
    self._read_config_data(path=path)

    token = self.get_option("oauth_token")
    hostname_preferences = self.get_option("hostnames")

    group_preferences = self.get_option("groups")
    if group_preferences is None:
        group_preferences = []

    self.extractors = {
        "public_ipv4": self.extract_public_ipv4,
        "private_ipv4": self.extract_private_ipv4,
        "hostname": self.extract_hostname,
    }
    self.group_extractors = {
        "location": self.extract_location,
        "offer": self.extract_offer,
        "rpn": self.extract_rpn
    }
    self.headers = {
        'Authorization': "Bearer %s" % token,
        'User-Agent': "ansible %s Python %s" % (ansible_version, python_version.split(' ')[0]),
        'Content-type': 'application/json'
    }

    servers_url = urljoin(InventoryModule.API_ENDPOINT, "api/v1/server")
    servers_api_path = self._fetch_information(url=servers_url)

    if "rpn" in group_preferences:
        rpn_groups_url = urljoin(InventoryModule.API_ENDPOINT, "api/v1/rpn/group")
        rpn_list = self._fetch_information(url=rpn_groups_url)
        self.rpn_lookup_cache = self.extract_rpn_lookup_cache(rpn_list)

    for server_api_path in servers_api_path:
        server_url = urljoin(InventoryModule.API_ENDPOINT, server_api_path)
        raw_server_info = self._fetch_information(url=server_url)

        if raw_server_info is None:
            continue

        self.do_server_inventory(host_infos=raw_server_info,
                                 hostname_preferences=hostname_preferences,
                                 group_preferences=group_preferences)
def refresh_url(self):
    query_parameters = [("limit", 0)]
    if self.query_filters:
        query_parameters.extend(
            filter(lambda x: x,
                   map(self.validate_query_parameters, self.query_filters)))
    self.device_url = urljoin(
        self.api_endpoint, "/api/dcim/devices/?" + urlencode(query_parameters))
    self.virtual_machines_url = urljoin(
        self.api_endpoint,
        "/api/virtualization/virtual-machines/?" + urlencode(query_parameters))
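# For reference, a sketch of how the URLs above are assembled: urlencode()
# turns the (name, value) pairs into a query string and urljoin() grafts the
# absolute path onto the endpoint. The filter values here are hypothetical,
# not taken from any real configuration.
from urllib.parse import urlencode, urljoin

query_parameters = [("limit", 0), ("site", "dc-west"), ("role", "server")]
print(urlencode(query_parameters))
# limit=0&site=dc-west&role=server
print(urljoin("https://netbox.example.com", "/api/dcim/devices/?" + urlencode(query_parameters)))
# https://netbox.example.com/api/dcim/devices/?limit=0&site=dc-west&role=server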
def delete_config(module, api, cur_cfg):
    if module.check_mode:
        # If you delete the config, everything but the interface name is
        # nulled out. Example:
        # {
        #     "greRemoteIps": null,
        #     "interface": "ext0",
        #     "localIp": null,
        #     "subnetBitLength": null
        # }
        # Fake up a DELETE response by creating a new config with all of the
        # parameters nulled out except for the interface name.
        want = {k[1]: None for k in PARAM_MAP}
        want['interface'] = module.params['interface']
        return want

    intf_name = module.params['interface']
    if not has_gre_tunnel(module, api, intf_name):
        return cur_cfg

    url = urljoin('gre-tunnels/', '{}'.format(intf_name))
    resp_code, response = api.delete_config(url)
    if resp_code not in ResponseCodes.GOOD_DEL_RESP:
        msg = "Could not delete config (code: {}, resp: {})"
        raise AEDAPIError(msg.format(resp_code, response))

    return get_interface_config(module, api)
def update_config(module, api, cur_cfg):
    if module.check_mode:
        # Get the current config and overlay the module parameters in order
        # to 'fake' an update.
        new_cfg = dict(cur_cfg)
        want = ans_to_rest(module.params, PARAM_MAP)
        new_cfg.update(want)
        return new_cfg

    body_params = ans_to_rest(module.params, PARAM_MAP)
    # Remove the 'interface' key as it should not be included in the param
    # list.
    del body_params['interface']

    if not body_params:
        # If body_params is empty, we aren't changing anything, so just
        # return the current config as the new config.
        return cur_cfg

    url = urljoin('gre-tunnels/', '{}'.format(module.params['interface']))
    # XXX: We're always using POST here, as it seems to work in every case
    # that PATCH would work.
    resp_code, response = api.create_config(url, body_params=body_params)
    if resp_code not in ResponseCodes.GOOD_RESP:
        msg = "Could not post new config (code: {}, resp: {})"
        raise AEDAPIError(msg.format(resp_code, response))

    return response
def login(self):
    ''' Log in to MSO '''

    # Perform login request
    if (self.params.get('login_domain') is not None) and (self.params.get('login_domain') != 'Local'):
        domain_id = self.get_login_domain_id(self.params.get('login_domain'))
        payload = {'username': self.params.get('username'),
                   'password': self.params.get('password'),
                   'domainId': domain_id}
    else:
        payload = {'username': self.params.get('username'),
                   'password': self.params.get('password')}
    self.url = urljoin(self.baseuri, 'auth/login')
    resp, auth = fetch_url(self.module,
                           self.url,
                           data=json.dumps(payload),
                           method='POST',
                           headers=self.headers,
                           timeout=self.params.get('timeout'),
                           use_proxy=self.params.get('use_proxy'))

    # Handle MSO response
    if auth.get('status') != 201:
        self.response = auth.get('msg')
        self.status = auth.get('status')
        self.fail_json(msg='Authentication failed: {msg}'.format(**auth))

    payload = json.loads(resp.read())
    self.headers['Authorization'] = 'Bearer {token}'.format(**payload)
def _fetch_information(token, url):
    results = []
    paginated_url = url
    while True:
        try:
            response = open_url(paginated_url,
                                headers={'X-Auth-Token': token,
                                         'Content-type': 'application/json'})
        except Exception as e:
            raise AnsibleError("Error while fetching %s: %s" % (url, to_native(e)))
        try:
            raw_json = json.loads(to_text(response.read()))
        except ValueError:
            raise AnsibleError("Incorrect JSON payload")

        try:
            results.extend(raw_json["servers"])
        except KeyError:
            raise AnsibleError("Incorrect format from the Scaleway API response")

        link = response.headers['Link']
        if not link:
            return results
        relations = parse_pagination_link(link)
        if 'next' not in relations:
            return results
        paginated_url = urllib_parse.urljoin(paginated_url, relations['next'])
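# The loop above follows RFC 5988 style pagination: each response carries a
# 'Link' header such as '</servers?page=2>; rel="next"'. The module's real
# parse_pagination_link() is not shown here; this is only a minimal sketch of
# what such a helper needs to do, assuming the header uses the usual
# '<url>; rel="name"' form.
import re

def parse_pagination_link_sketch(header):
    # Map each rel name ('next', 'last', ...) to its URL.
    return {name: url for url, name in re.findall(r'<([^>]+)>;\s*rel="([^"]+)"', header)}

# parse_pagination_link_sketch('</servers?page=2>; rel="next",</servers?page=5>; rel="last"')
# -> {'next': '/servers?page=2', 'last': '/servers?page=5'}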
def login(self):
    ''' Log in to MSC '''

    # Perform login request
    self.url = urljoin(self.baseuri, 'auth/login')
    payload = {'username': self.params['username'],
               'password': self.params['password']}
    resp, auth = fetch_url(self.module,
                           self.url,
                           data=json.dumps(payload),
                           method='POST',
                           headers=self.headers,
                           timeout=self.params['timeout'],
                           use_proxy=self.params['use_proxy'])

    # Handle MSC response
    if auth['status'] != 201:
        self.response = auth['msg']
        self.status = auth['status']
        self.fail_json(msg='Authentication failed: {msg}'.format(**auth))

    payload = json.loads(resp.read())
    self.headers['Authorization'] = 'Bearer {token}'.format(**payload)
def extract_config_context(self, host):
    try:
        url = urljoin(self.api_endpoint, "/api/dcim/devices/" + str(host["id"]))
        device_lookup = self._fetch_information(url)
        return [device_lookup["config_context"]]
    except Exception:
        return
def request(self, path, method=None, data=None, qs=None):
    ''' Generic HTTP method for MSO requests. '''
    self.path = path

    if method is not None:
        self.method = method

    self.url = urljoin(self.baseuri, path)
    if qs is not None:
        self.url = self.url + update_qs(qs)

    resp, info = fetch_url(self.module,
                           self.url,
                           headers=self.headers,
                           data=json.dumps(data),
                           method=self.method,
                           timeout=self.params['timeout'],
                           use_proxy=self.params['use_proxy'])
    self.response = info['msg']
    self.status = info['status']

    # 200: OK, 201: Created, 202: Accepted, 204: No Content
    if self.status in (200, 201, 202, 204):
        output = resp.read()
        # if self.method in ('DELETE', 'PATCH', 'POST', 'PUT') and self.status in (200, 201, 204):
        #     self.result['changed'] = True
        if output:
            return json.loads(output)

    # 404: Not Found
    elif self.method == 'DELETE' and self.status == 404:
        return {}

    # 400: Bad Request, 401: Unauthorized, 403: Forbidden,
    # 405: Method Not Allowed, 406: Not Acceptable,
    # 500: Internal Server Error, 501: Not Implemented
    elif self.status >= 400:
        try:
            payload = json.loads(resp.read())
        except Exception:
            payload = json.loads(info['body'])
        if 'code' in payload:
            self.fail_json(msg='MSO Error {code}: {message}'.format(**payload),
                           data=data, info=info, payload=payload)
        else:
            # Include the raw payload; the original format string had no
            # placeholders, so the payload was silently dropped.
            self.fail_json(msg='MSO Error: {0}'.format(payload),
                           data=data, info=info, payload=payload)

    return {}
def extract_ipaddresses(self, host):
    try:
        if self.interfaces:
            if 'device_role' in host:
                url = urljoin(
                    self.api_endpoint,
                    "/api/ipam/ip-addresses/?limit=0&device_id=%s" % (to_text(host["id"])))
            elif 'role' in host:
                url = urljoin(
                    self.api_endpoint,
                    "/api/ipam/ip-addresses/?limit=0&virtual_machine_id=%s" % (to_text(host["id"])))
            ipaddress_lookup = self.get_resource_list(api_url=url)
            return ipaddress_lookup
    except Exception:
        return
def request_upload(self, path, fields=None):
    ''' Generic HTTP multipart POST method for MSO uploads. '''
    self.path = path
    self.url = urljoin(self.baseuri, path)

    if not HAS_MULTIPART_ENCODER:
        self.fail_json(msg='requests-toolbelt is required for the upload state of this module')

    mp_encoder = MultipartEncoder(fields=fields)
    self.headers['Content-Type'] = mp_encoder.content_type
    self.headers['Accept-Encoding'] = "gzip, deflate, br"

    resp, info = fetch_url(self.module,
                           self.url,
                           headers=self.headers,
                           data=mp_encoder,
                           method='POST',
                           timeout=self.params.get('timeout'),
                           use_proxy=self.params.get('use_proxy'))

    self.response = info.get('msg')
    self.status = info.get('status')

    # Get change status from HTTP headers
    if 'modified' in info:
        self.has_modified = True
        if info.get('modified') == 'false':
            self.result['changed'] = False
        elif info.get('modified') == 'true':
            self.result['changed'] = True

    # 200: OK, 201: Created, 202: Accepted, 204: No Content
    if self.status in (200, 201, 202, 204):
        output = resp.read()
        if output:
            return json.loads(output)

    # 400: Bad Request, 401: Unauthorized, 403: Forbidden,
    # 405: Method Not Allowed, 406: Not Acceptable,
    # 500: Internal Server Error, 501: Not Implemented
    elif self.status >= 400:
        try:
            payload = json.loads(resp.read())
        except (ValueError, AttributeError):
            try:
                payload = json.loads(info.get('body'))
            except Exception:
                self.fail_json(msg='MSO Error:', info=info)
        if 'code' in payload:
            self.fail_json(msg='MSO Error {code}: {message}'.format(**payload),
                           info=info, payload=payload)
        else:
            # Include the raw payload; the original format string had no
            # placeholders, so the payload was silently dropped.
            self.fail_json(msg='MSO Error: {0}'.format(payload),
                           info=info, payload=payload)

    return {}
def get_interface_config(module, api):
    intf_name = module.params['interface']
    url = urljoin('gre-tunnels/', '{}'.format(intf_name))
    resp_code, response = api.get_config(url)

    if resp_code not in ResponseCodes.GOOD_RESP:
        # XXX: The API can return different status codes for an invalid
        # interface (422, 500), instead of just returning a 404. Since we
        # can't differentiate between 'interface not found' and 'an error
        # occurred' we'll just punt if the user gives an invalid interface.
        msg = "Could not get config for interface '{}' (code: {}, resp: {})"
        raise AEDAPIError(msg.format(intf_name, resp_code, response))

    return response
def call(self, operation_id, parameters=None, body=None, uploads=None):
    method, path = self.operations[operation_id]
    path_spec = self.api_spec["paths"][path]
    method_spec = path_spec[method]

    if parameters is None:
        parameters = {}
    else:
        parameters = parameters.copy()

    if any(self.extract_params("cookie", path_spec, method_spec, parameters)):
        raise NotImplementedError("Cookie parameters are not implemented.")

    headers = self.extract_params("header", path_spec, method_spec, parameters)

    for name, value in self.extract_params("path", path_spec, method_spec, parameters).items():
        path = path.replace("{" + name + "}", value)

    query_string = urlencode(
        self.extract_params("query", path_spec, method_spec, parameters),
        doseq=True)

    if any(parameters):
        raise Exception(
            "Parameter [{names}] not available for {operation_id}.".format(
                names=", ".join(parameters.keys()),
                operation_id=operation_id))

    url = urljoin(self.base_url, path)
    if query_string:
        url += "?" + query_string

    data = self.render_body(path_spec, method_spec, headers, body, uploads)

    result = self._session.open(method, url,
                                data=data,
                                headers=headers,
                                unix_socket=self.unix_socket).read()
    if result:
        return json.loads(result)
    return None
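# Hypothetical usage of call(); the operationId and the 'task_href' parameter
# name below are illustrative placeholders, not names guaranteed by any real
# spec:
#
#     client.load_api()
#     task = client.call("tasks_read", parameters={"task_href": "/api/v3/tasks/1234/"})
#
# Path parameters are substituted into the path template, query parameters
# are urlencoded onto the URL, and the JSON response body is decoded.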
def parse(self, inventory, loader, path, cache=True):  # Plugin interface (2)
    super(InventoryModule, self).parse(inventory, loader, path)
    self._read_config_data(path)

    host = self.get_option('host')
    ping_url = urljoin(host, 'api/v2/ping')
    try:
        response = Request().get(ping_url)
        text = response.read()
        data = json.loads(text)
    except (ConnectionError, urllib_error.URLError) as e:
        raise AnsibleParserError(
            "Unable to connect to Tower or AWX server %s: %s" % (host, e))
    except (ValueError, TypeError) as e:
        raise AnsibleParserError(
            'Failed to parse json data from host, error: %s, data: %s' % (e, text))

    for instance_data in data['instances']:
        self.inventory.add_host(instance_data['node'])  # Inventory interface (1)
        self.inventory.set_variable(instance_data['node'], 'capacity',
                                    instance_data['capacity'])  # Inventory interface (2)

    for group_data in data['instance_groups']:
        group_name = self.inventory.add_group(group_data['name'])  # Inventory interface (3)
        self.inventory.set_variable(group_name, 'group_capacity',
                                    group_data['capacity'])  # Inventory interface (2)
        for instance in group_data['instances']:
            self.inventory.add_child(group_name, instance)  # Inventory interface (4)
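# Minimal shape of the ping payload that parse() above consumes, inferred
# only from the keys the code reads; a real /api/v2/ping response carries
# additional fields.
example_ping_data = {
    "instances": [
        {"node": "awx-1.example.com", "capacity": 100},
    ],
    "instance_groups": [
        {"name": "tower", "capacity": 100, "instances": ["awx-1.example.com"]},
    ],
}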
def load_api(self, refresh_cache=False):
    xdg_cache_home = os.environ.get("XDG_CACHE_HOME") or "~/.cache"
    apidoc_cache = os.path.join(
        os.path.expanduser(xdg_cache_home),
        "squeezer",
        self.base_url.replace(":", "_").replace("/", "_"),
        "api.json",
    )
    try:
        if refresh_cache:
            raise IOError()
        with open(apidoc_cache) as f:
            data = f.read()
    except IOError:
        makedirs(os.path.dirname(apidoc_cache), exist_ok=True)
        with open(apidoc_cache, "wb") as f:
            f.write(
                self._session.open(
                    "GET", urljoin(self.base_url, self.doc_path)
                ).read()
            )
        with open(apidoc_cache) as f:
            data = f.read()
    self.api_spec = json.loads(data)
    if self.api_spec.get("swagger") == "2.0":
        self.openapi_version = 2
    elif self.api_spec.get("openapi", "").startswith("3."):
        self.openapi_version = 3
    else:
        raise NotImplementedError("Unknown schema version")
    self.operations = {
        method_entry["operationId"]: (method, path)
        for path, path_entry in self.api_spec["paths"].items()
        for method, method_entry in path_entry.items()
        if method in {"get", "put", "post", "delete", "options", "head", "patch", "trace"}
    }
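# To make the operations mapping concrete: given a spec fragment like this
# (a made-up minimal document, not from any real API) ...
example_spec = {
    "openapi": "3.0.0",
    "paths": {
        "/tasks/{id}/": {
            "get": {"operationId": "tasks_read"},
            "delete": {"operationId": "tasks_delete"},
        },
    },
}
# ... the comprehension in load_api() produces:
# {"tasks_read": ("get", "/tasks/{id}/"),
#  "tasks_delete": ("delete", "/tasks/{id}/")}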
def parse(self, inventory, loader, path, cache=True):
    super(InventoryModule, self).parse(inventory, loader, path)
    self._read_config_data(path=path)

    hostname_preference = self.get_option("hostname")

    group_preferences = self.get_option("groups")
    if group_preferences is None:
        group_preferences = []

    self.extractors = {
        "private_ipv4": self.extract_private_ipv4,
        "id": self.extract_id,
    }
    self.group_extractors = {"os": self.extract_os_name_for_group}
    self.headers = {
        'User-Agent': "ansible %s Python %s" % (ansible_version, python_version.split(' ')[0]),
        'Content-type': 'application/json'
    }

    servers_url = urljoin(InventoryModule.API_ENDPOINT, "deployment")
    deployment_info = self._fetch_information(url=servers_url)
    if deployment_info is None:
        self.display.error(
            "An error occurred: no inventory could be fetched from the UDF API.")
        return

    for component_info in deployment_info['deployment']['components']:
        self.do_server_inventory(host_infos=component_info,
                                 hostname_preference=hostname_preference,
                                 group_preferences=group_preferences)
def refresh_regions_lookup(self):
    url = urljoin(self.api_endpoint, "/api/dcim/regions/?limit=0")
    regions = self.get_resource_list(api_url=url)
    self.regions_lookup = dict(
        (region["id"], region["name"]) for region in regions)
def request_download(self, path, destination=None):
    self.url = urljoin(self.baseuri, path)
    redirected = False
    redir_info = {}
    redirect = {}

    src = self.params.get('src')
    if src:
        try:
            self.headers.update({'Content-Length': os.stat(src).st_size})
            data = open(src, 'rb')
        except OSError:
            self.fail_json(msg='Unable to open source file %s' % src, elapsed=0)
    else:
        data = None

    kwargs = {}
    if destination is not None:
        if os.path.isdir(destination):
            # first check if we are redirected to a file download
            check, redir_info = fetch_url(self.module,
                                          self.url,
                                          headers=self.headers,
                                          method='GET',
                                          timeout=self.params.get('timeout'))
            # if we are redirected, update the url with the location header,
            # and update dest with the new url filename
            if redir_info['status'] in (301, 302, 303, 307):
                self.url = redir_info.get('location')
                redirected = True
            destination = os.path.join(
                destination,
                check.headers.get("Content-Disposition").split("filename=")[1])
        # if the destination file already exists, only download if the file is newer
        if os.path.exists(destination):
            kwargs['last_mod_time'] = datetime.datetime.utcfromtimestamp(
                os.path.getmtime(destination))

    resp, info = fetch_url(self.module,
                           self.url,
                           data=data,
                           headers=self.headers,
                           method='GET',
                           timeout=self.params.get('timeout'),
                           unix_socket=self.params.get('unix_socket'),
                           **kwargs)

    try:
        content = resp.read()
    except AttributeError:
        # there was no content, but the error read() may have been stored in the info as 'body'
        content = info.pop('body', '')

    if src:
        # Try to close the open file handle
        try:
            data.close()
        except Exception:
            pass

    redirect['redirected'] = redirected or info.get('url') != self.url
    redirect.update(redir_info)
    redirect.update(info)

    write_file(self.module, self.url, destination, content, redirect)

    return redirect, destination
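# The destination filename above is everything after 'filename=' in the
# Content-Disposition header. An illustrative header value (note that a
# quoted filename would keep its quotes with this simple split):
#
#     'attachment; filename=backup.tar.gz'.split('filename=')[1]  # -> 'backup.tar.gz'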
def get_collection_versions(self, namespace, name):
    """
    Gets a list of available versions for a collection on a Galaxy server.

    :param namespace: The collection namespace.
    :param name: The collection name.
    :return: A list of versions that are available.
    """
    relative_link = False
    if 'v3' in self.available_api_versions:
        api_path = self.available_api_versions['v3']
        pagination_path = ['links', 'next']
        relative_link = True  # AH pagination results are relative, not absolute URIs.
    else:
        api_path = self.available_api_versions['v2']
        pagination_path = ['next']

    page_size_name = 'limit' if 'v3' in self.available_api_versions else 'page_size'
    versions_url = _urljoin(self.api_server, api_path, 'collections', namespace, name, 'versions',
                            '/?%s=%d' % (page_size_name, COLLECTION_PAGE_SIZE))
    versions_url_info = urlparse(versions_url)

    # We should only rely on the cache if the collection has not changed. This may slow things
    # down but it ensures we are not waiting a day before finding any new collections that have
    # been published.
    if self._cache:
        server_cache = self._cache.setdefault(get_cache_id(versions_url), {})
        modified_cache = server_cache.setdefault('modified', {})

        try:
            modified_date = self.get_collection_metadata(namespace, name).modified_str
        except GalaxyError as err:
            if err.http_code != 404:
                raise
            # No collection found, return an empty list to keep things consistent with the various APIs
            return []

        cached_modified_date = modified_cache.get('%s.%s' % (namespace, name), None)
        if cached_modified_date != modified_date:
            modified_cache['%s.%s' % (namespace, name)] = modified_date
            if versions_url_info.path in server_cache:
                del server_cache[versions_url_info.path]

            self._set_cache()

    error_context_msg = 'Error when getting available collection versions for %s.%s from %s (%s)' \
                        % (namespace, name, self.name, self.api_server)

    try:
        data = self._call_galaxy(versions_url, error_context_msg=error_context_msg, cache=True)
    except GalaxyError as err:
        if err.http_code != 404:
            raise
        # v3 doesn't raise a 404 so we need to mimic the empty response from APIs that do.
        return []

    if 'data' in data:
        # v3 automation-hub is the only known API that uses `data`;
        # since v3 pulp_ansible does not, we cannot rely on the version
        # to indicate which key to use.
        results_key = 'data'
    else:
        results_key = 'results'

    versions = []
    while True:
        versions += [v['version'] for v in data[results_key]]

        next_link = data
        for path in pagination_path:
            next_link = next_link.get(path, {})

        if not next_link:
            break
        elif relative_link:
            # TODO: This assumes the pagination result is relative to the root server. Will need
            # to be verified with someone who knows the AH API.

            # Remove the query string from the versions_url to use the next_link's query
            versions_url = urljoin(versions_url, urlparse(versions_url).path)
            next_link = versions_url.replace(versions_url_info.path, next_link)

        data = self._call_galaxy(to_native(next_link, errors='surrogate_or_strict'),
                                 error_context_msg=error_context_msg, cache=True)
    self._set_cache()

    return versions
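# A worked example of the relative-link splice above, using hypothetical
# URLs (not real Galaxy responses):
from urllib.parse import urljoin, urlparse

versions_url = 'https://hub.example.com/api/galaxy/v3/collections/ns/name/versions/?limit=100'
versions_url_path = urlparse(versions_url).path
next_link = '/api/galaxy/v3/collections/ns/name/versions/?limit=100&offset=100'

# Drop the query string, keeping scheme, host and path ...
versions_url = urljoin(versions_url, versions_url_path)
# versions_url == 'https://hub.example.com/api/galaxy/v3/collections/ns/name/versions/'

# ... then swap the old path for the relative next link to get an absolute URL.
next_link = versions_url.replace(versions_url_path, next_link)
# next_link == 'https://hub.example.com/api/galaxy/v3/collections/ns/name/versions/?limit=100&offset=100'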
def request(self, path, method=None, data=None, qs=None):
    ''' Generic HTTP method for MSO requests. '''
    self.path = path

    if method is not None:
        self.method = method

    # If we PATCH with empty operations, return
    if method == 'PATCH' and not data:
        return {}

    self.url = urljoin(self.baseuri, path)
    if qs is not None:
        self.url = self.url + update_qs(qs)

    resp, info = fetch_url(self.module,
                           self.url,
                           headers=self.headers,
                           data=json.dumps(data),
                           method=self.method,
                           timeout=self.params.get('timeout'),
                           use_proxy=self.params.get('use_proxy'))

    self.response = info.get('msg')
    self.status = info.get('status')
    # self.result['info'] = info

    # Get change status from HTTP headers
    if 'modified' in info:
        self.has_modified = True
        if info.get('modified') == 'false':
            self.result['changed'] = False
        elif info.get('modified') == 'true':
            self.result['changed'] = True

    # 200: OK, 201: Created, 202: Accepted, 204: No Content
    if self.status in (200, 201, 202, 204):
        output = resp.read()
        if output:
            return json.loads(output)

    # 404: Not Found
    elif self.method == 'DELETE' and self.status == 404:
        return {}

    # 400: Bad Request, 401: Unauthorized, 403: Forbidden,
    # 405: Method Not Allowed, 406: Not Acceptable,
    # 500: Internal Server Error, 501: Not Implemented
    elif self.status >= 400:
        try:
            output = resp.read()
            payload = json.loads(output)
        except (ValueError, AttributeError):
            try:
                payload = json.loads(info.get('body'))
            except Exception:
                self.fail_json(msg='MSO Error:', data=data, info=info)
        if 'code' in payload:
            self.fail_json(msg='MSO Error {code}: {message}'.format(**payload),
                           data=data, info=info, payload=payload)
        else:
            # Include the raw payload; the original format string had no
            # placeholders, so the payload was silently dropped.
            self.fail_json(msg='MSO Error: {0}'.format(payload),
                           data=data, info=info, payload=payload)

    return {}
def _download_api(self):
    return self._session.open(
        "GET", urljoin(self.base_url, self.doc_path), unix_socket=self.unix_socket
    ).read()
def refresh_sites_lookup(self):
    url = urljoin(self.api_endpoint, "/api/dcim/sites/?limit=0")
    sites = self.get_resource_list(api_url=url)
    self.sites_lookup = dict((site["id"], site["name"]) for site in sites)
def refresh_platforms_lookup(self):
    url = urljoin(self.api_endpoint, "/api/dcim/platforms/?limit=0")
    platforms = self.get_resource_list(api_url=url)
    self.platforms_lookup = dict(
        (platform["id"], platform["name"]) for platform in platforms)
def get_plugin_info(module, plugin_manager_url, intellij_home, plugin_id):
    build_number = get_build_number(module, intellij_home)

    params = {'action': 'download', 'build': build_number, 'id': plugin_id}
    query_params = urlencode(params)
    url = '%s?%s' % (plugin_manager_url, query_params)

    for _ in range(0, 3):
        resp, info = fetch_url(module,
                               url,
                               method='HEAD',
                               timeout=3,
                               follow_redirects=False)
        if resp is not None:
            resp.close()
        status_code = info['status']
        if status_code == 404:
            module.fail_json(msg='Unable to find plugin "%s" for build "%s"' % (plugin_id, build_number))
        if status_code > -1 and status_code < 400:
            break
        # 3 retries, 5 seconds apart
        time.sleep(5)

    if status_code == -1 or status_code >= 400:
        module.fail_json(msg='Error querying url "%s": %s' % (url, info['msg']))

    location = info.get('location')
    if location is None:
        location = info.get('Location')
    if location is None:
        module.fail_json(msg='Unsupported HTTP response for: %s (status=%s)' % (url, status_code))

    if location.startswith('http'):
        plugin_url = location
    else:
        plugin_url = urljoin(plugin_manager_url, location)

    # The trailing query string is optional in both patterns; it is only
    # matched so it can be excluded from the file name.
    jar_pattern = re.compile(r'/(?P<file_name>[^/]+\.jar)(?:\?.*)?$')
    jar_matcher = jar_pattern.search(plugin_url)

    if jar_matcher:
        file_name = jar_matcher.group('file_name')
    else:
        versioned_pattern = re.compile(
            r'(?P<plugin_id>[0-9]+)/(?P<update_id>[0-9]+)/'
            r'(?P<file_name>[^/]+)(?:\?.*)?$')
        versioned_matcher = versioned_pattern.search(plugin_url)
        if versioned_matcher:
            file_name = '%s-%s-%s' % (versioned_matcher.group('plugin_id'),
                                      versioned_matcher.group('update_id'),
                                      versioned_matcher.group('file_name'))
        else:
            # hashlib requires bytes, so encode the URL before hashing.
            hash_object = hashlib.sha256(plugin_url.encode('utf-8'))
            file_name = '%s-%s.zip' % (plugin_id, hash_object.hexdigest())

    return plugin_url, file_name
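# How the filename derivation above behaves, with made-up plugin URLs:
import re

jar_pattern = re.compile(r'/(?P<file_name>[^/]+\.jar)(?:\?.*)?$')
versioned_pattern = re.compile(
    r'(?P<plugin_id>[0-9]+)/(?P<update_id>[0-9]+)/(?P<file_name>[^/]+)(?:\?.*)?$')

# Direct .jar downloads keep their own name:
m = jar_pattern.search('https://plugins.example.com/files/some-plugin.jar?updateId=7')
print(m.group('file_name'))  # some-plugin.jar

# '<plugin_id>/<update_id>/<file>' URLs get a composite name:
m = versioned_pattern.search('https://plugins.example.com/files/631/102063/python.zip')
print('%s-%s-%s' % (m.group('plugin_id'), m.group('update_id'), m.group('file_name')))
# 631-102063-python.zip

# Anything else falls back to '<plugin_id>-<sha256(url)>.zip'.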
def refresh_tenants_lookup(self):
    url = urljoin(self.api_endpoint, "/api/tenancy/tenants/?limit=0")
    tenants = self.get_resource_list(api_url=url)
    self.tenants_lookup = dict(
        (tenant["id"], tenant["name"]) for tenant in tenants)
def refresh_racks_lookup(self):
    url = urljoin(self.api_endpoint, "/api/dcim/racks/?limit=0")
    racks = self.get_resource_list(api_url=url)
    self.racks_lookup = dict((rack["id"], rack["name"]) for rack in racks)
def refresh_manufacturers_lookup(self):
    url = urljoin(self.api_endpoint, "/api/dcim/manufacturers/?limit=0")
    manufacturers = self.get_resource_list(api_url=url)
    self.manufacturers_lookup = dict(
        (manufacturer["id"], manufacturer["name"]) for manufacturer in manufacturers)
def refresh_device_types_lookup(self):
    url = urljoin(self.api_endpoint, "/api/dcim/device-types/?limit=0")
    device_types = self.get_resource_list(api_url=url)
    self.device_types_lookup = dict(
        (device_type["id"], device_type["model"]) for device_type in device_types)