def check_api_versions(self, api_versions):
    """Check that the server supports one of the given API versions.

    :param api_versions: candidate versions, in order of preference.
    :return: True if a mutually supported version was found (stored in
        ``self.api_version``), False if the server's supported versions
        could not be determined.
    :raises RvbdException: if not connected, or if none of the requested
        versions is supported by the server.
    """
    if self.conn is None:
        raise RvbdException("Not connected")

    try:
        self.supported_versions = self._get_supported_versions()
    except RvbdHTTPException as e:
        if e.status != 404:
            raise
        # A 404 just means the server does not expose the services
        # listing; record "unknown" rather than failing hard.
        # BUG FIX: message typo "retrieved" -> "retrieve".
        logger.warning("Failed to retrieve supported versions")
        self.supported_versions = None

    if self.supported_versions is None:
        return False

    logger.debug("Server supports the following services: %s" %
                 (",".join([str(v) for v in self.supported_versions])))

    # Pick the first requested version that the server also supports.
    for v in api_versions:
        if v in self.supported_versions:
            self.api_version = v
            logger.debug("Service '%s' supports version '%s'" %
                         (self.service, v))
            return True

    msg = ("API version(s) %s not supported (supported version(s): %s)" %
           (', '.join([str(v) for v in api_versions]),
            ', '.join([str(v) for v in self.supported_versions])))
    raise RvbdException(msg)
def authenticate(self, auth):
    """Authenticate with device using the defined authentication method.
    This sets up the appropriate authentication headers to access
    restricted resources.

    `auth` must be an instance of either UserAuth or OAuth."""
    assert auth is not None

    self.auth = auth
    self._detect_auth_methods()

    if self._supports_auth_oauth and Auth.OAUTH in self.auth.methods:
        # TODO fix for future support to handle appropriate triplets
        code = self.auth.access_code
        path = '/api/common/1.0/oauth/token'
        data = {'grant_type': 'access_code',
                'assertion': code}
        # BUG FIX: the literal string 'POST' was previously passed as a
        # third positional argument (the request body); the grant data
        # is sent via `params` as before, without the bogus body.
        answer = self.conn.json_request('POST', path, params=data)
        token = answer['access_token']
        # A bare token is a plain bearer token; three dot-separated
        # segments indicate a signed (JWT-style) token.
        st = token.split('.')
        if len(st) == 1:
            auth_header = 'Bearer %s' % token
        elif len(st) == 3:
            auth_header = 'SignedBearer %s' % token
        else:
            raise RvbdException('Unknown OAuth response from server: %s'
                                % st)
        self.conn.add_headers({'Authorization': auth_header})
        logger.info('Authenticated using OAUTH2.0')

    elif self._supports_auth_cookie and Auth.COOKIE in self.auth.methods:
        path = '/api/common/1.0/login'
        data = {"username": self.auth.username,
                "password": self.auth.password}
        response_body, http_response = self.conn.json_request(
            'POST', path, body=data, raw_response=True)

        # we're good, set up our http headers for subsequent
        # requests!
        cookie = http_response.headers['set-cookie']
        self.conn.add_headers({'Cookie': cookie})
        logger.info("Authenticated using COOKIE")

    elif self._supports_auth_basic and Auth.BASIC in self.auth.methods:
        # Use HTTP Basic authentication (Python 2: b64encode on str)
        s = base64.b64encode("%s:%s" % (self.auth.username,
                                        self.auth.password))
        self.conn.add_headers({'Authorization': 'Basic %s' % s})
        logger.info("Authenticated using BASIC")

    else:
        raise RvbdException("No supported authentication methods")
def get_columns(self, columns, groupby=None, strict=True):
    """Return valid Column objects for list of columns

    :param list columns: list of strings, Column objects, or
        JSON dicts defining a column
    :param str groupby: will optionally ensure that the selected
        columns are valid for the given groupby
    :param bool strict: If True (default), will validate input against
        known Columns or create ephemeral columns for dynamic reports.
        If False, will avoid validation and process input as given.
        Used in some template or MultiQuery scenarios where the
        columns aren't specific to a known realm/groupby pairing.

    Note that this function may be incomplete for any given groupby.
    """
    valid = []
    groupby_cols = (self.search_columns(groupbys=[groupby])
                    if groupby else None)
    known_names = self.colnames

    for column in columns:
        if isinstance(column, types.StringTypes):
            # plain column-name string
            cname = column
        elif isinstance(column, Column):
            # usually a Column class
            cname = column.key
        else:
            # otherwise, likely a json-dict column definition as
            # returned by a query for a report legend; in non-strict
            # mode we accept the json as a given column object
            if column['id'] >= _constants.EPHEMERAL_COLID or not strict:
                # Ephemeral column: build it directly, skip validation
                valid.extend(self._gencolumns([column]))
                continue
            cname = column['strid'].lower()[3:]

        if cname not in known_names:
            raise RvbdException('{0} is not a valid column '
                                'for this netprofiler'.format(column))
        if groupby_cols and cname not in groupby_cols:
            raise RvbdException('{0} is not a valid column '
                                'for groupby {1}'.format(column, groupby))
        valid.append(self.columns[cname])

    return valid
def run(self, lan_interfaces, wan_interfaces, direction, columns=None, timefilter='last 1 h', trafficexpr=None, groupby=None, resolution='auto'): """ Run WAN Time Series Report :param lan_interfaces: list of full interface name for LAN interface, e.g. ['10.99.16.252:1'] :param wan_interfaces: list of full interface name for WAN interface :param direction: :type direction: 'inbound' or 'outbound' :param columns: list of columns available in both `in_` and `out_` versions, for example, ['avg_bytes', 'total_bytes'], instead of ['in_avg_bytes', 'out_avg_bytes'] :param str groupby: Ignored for this report type, included for interface compatibility """ # we need some heavier data analysis tools for this report import pandas as pd self.columns = columns self.timefilter = timefilter self.trafficexpr = trafficexpr self.resolution = resolution self.groupby = 'tim' self._configure() self._convert_columns() lan_data, wan_data = self._run_reports(lan_interfaces, wan_interfaces) key_columns = [c.key for c in self.columns if c.iskey] if key_columns[0] != 'time': raise RvbdException('Invalid Key Column for WANTimeSeriesReport') labels = [c.key for c in self.columns] # create data frames, and convert timestamps df_lan = pd.DataFrame.from_records(lan_data, columns=labels) df_lan.set_index('time', inplace=True) df_lan.index = df_lan.index.map(int).astype('M8[s]') df_wan = pd.DataFrame.from_records(wan_data, columns=labels) df_wan.set_index('time', inplace=True) df_wan.index = df_wan.index.map(int).astype('M8[s]') # remove and rename columns appropriately lan_columns, wan_columns = self._align_columns(direction, df_lan, df_wan) self.table = lan_columns.join(wan_columns, how='inner')
def load(self):
    """Load settings and groups."""
    if self.id is None:
        raise RvbdException('Type: "{0}" has not yet been saved to the '
                            'Netprofiler, so there is nothing to load. '
                            'Call $host_group_type.save() first to save it.'
                            .format(self.name))

    info = self.netprofiler.api.host_group_types.get(self.id)
    self.name = info['name']
    self.favorite = info['favorite']
    self.description = info['description']

    try:
        self.config = \
            self.netprofiler.api.host_group_types.get_config(self.id)
    except RvbdHTTPException as e:
        if e.error_id != 'RESOURCE_NOT_FOUND':
            raise e
        # get_config raises RESOURCE_NOT_FOUND when the type exists but
        # its config is empty -- treat that as an empty config rather
        # than an error, since we still want the HostGroupType.
        logger.debug('RESOURCE_NOT_FOUND exception raised because the '
                     'config is empty. It was excepted because we '
                     'still want the HostGroupType.')
        self.config = []
        self.groups = {}

    # Rebuild group objects from the config entries.  Only the *first*
    # entry found for a given name needs a HostGroup, as HostGroup does
    # not store data itself -- it references back to this object's
    # 'config' property.
    for entry in self.config:
        if entry['name'] not in self.groups:
            HostGroup(self, entry['name'])
def _align_columns(self, direction, df_lan, df_wan):
    """Replace lan and wan dataframe columns with those appropriate
    for inbound/outbound data.

    Column prefixes map as follows:

                    LAN         WAN
        Inbound     <out_>      <in_>
        Outbound    <in_>       <out_>

    :param str direction: 'inbound' or 'outbound'
    :param df_lan: DataFrame of LAN interface data
    :param df_wan: DataFrame of WAN interface data
    :return: tuple of (lan_columns, wan_columns) DataFrames with
        columns renamed to LAN_/WAN_ prefixes
    :raises RvbdException: for any other `direction` value
    """
    # boolean masks for in_, out_ and key (neither prefix) columns
    in_flags = [c.startswith('in_') for c in df_lan.keys()]
    out_flags = [c.startswith('out_') for c in df_lan.keys()]
    key_flags = [not x and not y for x, y in zip(in_flags, out_flags)]

    if direction == 'inbound':
        lan_flags = [x or y for x, y in zip(key_flags, out_flags)]
        # BUG FIX: DataFrame.ix was deprecated and then removed from
        # pandas; .loc with a boolean mask is the direct replacement.
        # rename() is used without inplace to avoid mutating a slice.
        lan_columns = df_lan.loc[:, lan_flags]
        lan_columns = lan_columns.rename(
            columns=lambda n: n.replace('out_', 'LAN_'))
        wan_columns = df_wan.loc[:, in_flags]
        wan_columns = wan_columns.rename(
            columns=lambda n: n.replace('in_', 'WAN_'))
    elif direction == 'outbound':
        lan_flags = [x or y for x, y in zip(key_flags, in_flags)]
        lan_columns = df_lan.loc[:, lan_flags]
        lan_columns = lan_columns.rename(
            columns=lambda n: n.replace('in_', 'LAN_'))
        wan_columns = df_wan.loc[:, out_flags]
        wan_columns = wan_columns.rename(
            columns=lambda n: n.replace('out_', 'WAN_'))
    else:
        raise RvbdException('Invalid direction %s for WANSummaryReport'
                            % direction)

    return lan_columns, wan_columns
def xml_request(self, method, path, body=None, params=None,
                extra_headers=None, raw_response=False):
    """Send an XML request to the host.

    The Content-Type and Accept headers are set to text/xml.  In
    addition, any response will be XML-decoded as an
    xml.etree.ElementTree.  The body is assumed to be an XML encoded
    text string and is inserted into the HTTP payload as-is.

    :param method: HTTP method, e.g. 'GET' or 'POST'
    :param path: request path
    :param body: optional XML text to send as the payload
    :param params: optional URL parameters
    :param extra_headers: optional additional request headers
    :param raw_response: if True, return (tree, response) instead of
        just the parsed tree
    :raises RvbdException: if the response Content-type is missing or
        is not text/xml
    """
    extra_headers = self._prepare_headers(extra_headers)
    extra_headers['Content-Type'] = 'text/xml'
    extra_headers['Accept'] = 'text/xml'

    r = self._request(method, path, body, params, extra_headers)

    t = r.headers.get('Content-type', None)
    # BUG FIX: a missing Content-type header used to crash with an
    # AttributeError (None.find); treat it as an unexpected type.
    if t is None or t.find('text/xml') == -1:
        raise RvbdException('unexpected content type %s' % t)

    tree = ElementTree.fromstring(r.text.encode('ascii', 'ignore'))

    if raw_response:
        return tree, r

    return tree
def import_file(self, f, name):
    # Import (or re-import) the config file `f` under module name
    # `name`, reporting progress on self.stdout.  Any failure is
    # re-raised as the same exception type with the config file name
    # prepended to the message, preserving the original traceback
    # via the Python 2 three-argument raise syntax.
    try:
        if name in sys.modules:
            # already imported once -- force a reload
            reload(sys.modules[name])
            self.stdout.write('reloading %s as %s\n' % (f, name))
        else:
            __import__(name)
            self.stdout.write('importing %s as %s\n' % (f, name))
    except RvbdHTTPException as e:
        # NOTE(review): HTTP errors are wrapped as plain RvbdException
        # here rather than re-raised as RvbdHTTPException -- presumably
        # intentional; confirm with callers.
        instance = RvbdException('From config file "%s": %s\n'
                                 % (name, e.message))
        raise RvbdException, instance, sys.exc_info()[2]
    except SyntaxError as e:
        # include file/line/offset context for syntax errors
        msg_format = '%s: (file: %s, line: %s, offset: %s)\n%s\n'
        message = msg_format % (e.msg, e.filename, e.lineno,
                                e.offset, e.text)
        instance = type(e)('From config file "%s": %s\n'
                           % (name, message))
        raise type(e), instance, sys.exc_info()[2]
    except Exception as e:
        instance = type(e)('From config file "%s": %s\n'
                           % (name, str(e)))
        raise type(e), instance, sys.exc_info()[2]
def get_columns(self, columns, groupby=None):
    """Return valid Column objects for list of columns

    :param list columns: list of strings, Column objects, or
        JSON dicts defining a column
    :param str groupby: will optionally ensure that the selected
        columns are valid for the given groupby

    Note that this function may be incomplete for any given groupby.
    """
    selected = []
    valid_for_groupby = (self.search_columns(groupbys=[groupby])
                         if groupby else None)
    known_names = self.colnames

    for column in columns:
        if isinstance(column, types.StringTypes):
            # plain column-name string
            cname = column
        elif isinstance(column, Column):
            # usually a Column class
            cname = column.key
        else:
            # otherwise, likely a json-dict column definition as
            # returned by a query for a report legend
            if column['id'] >= _constants.EPHEMERAL_COLID:
                # Ephemeral column: build it directly, skip validation
                selected.extend(self._gencolumns([column]))
                continue
            cname = column['strid'].lower()[3:]

        if cname not in known_names:
            raise RvbdException('{0} is not a valid column '
                                'for this netprofiler'.format(column))
        if valid_for_groupby and cname not in valid_for_groupby:
            raise RvbdException('{0} is not a valid column '
                                'for groupby {1}'.format(column, groupby))
        selected.append(self.columns[cname])

    return selected
def __init__(self, hostname, auth=None, port=None, verify=True,
             reauthenticate_handler=None):
    """ Initialize new connection and setup authentication

        `hostname` - include protocol, e.g. "https://host.com"

        `auth` - authentication object, see below

        `port` - optional port to use for connection

        `verify` - require SSL certificate validation.

        Authentication:
        For simple basic auth, passing a tuple of (user, pass) is
        sufficient as a shortcut to an instance of HTTPBasicAuth.
        This auth method will trigger a check  to ensure
        the protocol is using SSL to connect (though cert verification
        may still be turned off to avoid errors with self-signed certs).

        OAuth2 will require the ``requests-oauthlib`` package and
        an instance of the `OAuth2Session` object.

        netrc config files will be checked if auth is left as None.
        If no authentication is provided for the hostname in the
        netrc file, or no file exists, an error will be raised
        when trying to connect.
    """
    p = parse_url(hostname)

    if p.port and port and p.port != port:
        raise RvbdException('Mismatched ports provided.')
    elif not p.port and port:
        hostname = hostname + ':' + str(port)

    if not p.scheme:
        # default to https, except when port 80 specified
        # BUG FIX: parse_url() yields the port as an int, so the old
        # comparison against the string '80' could never match and
        # the http:// default never triggered; accept both forms.
        if parse_url(hostname).port in (80, '80'):
            logger.info("Connection defaulting to 'http://' scheme.")
            hostname = 'http://' + hostname
        else:
            logger.info("Connection defaulting to 'https://' scheme.")
            hostname = 'https://' + hostname

    self.hostname = hostname
    self._ssladapter = False

    if self.HTTPLIB_DEBUGLEVEL > 0:
        self.set_debuglevel()

    self.conn = requests.session()
    self.conn.auth = auth
    self.conn.verify = verify
    self._reauthenticate_handler = reauthenticate_handler

    # store last full response
    self.response = None

    logger.debug("Connection initialized for %s" % self.hostname)
def delete(self):
    """Delete this host group type and all groups."""
    type_id = self.id
    if type_id is None:
        # nothing on the appliance to remove yet
        raise RvbdException('Type: "{0}" has not yet been saved to the '
                            'Netprofiler, so there is nothing to delete. '
                            'Call $host_group_type.save() first to save it.'
                            .format(self.name))
    self.netprofiler.api.host_group_types.delete(type_id)
    # mark this object as no longer backed by the appliance
    self.id = None
def _add_host_group(self, new_host_group):
    """ Add a new host group to groups dictionary.

    :param new_host_group: the new HostGroup to be added
    :raises RvbdException: if a group with the same name already exists
    """
    group_name = new_host_group.name
    if group_name in self.groups:
        raise RvbdException('Host group: "{0}" already exists.'.format(
            group_name))
    self.groups[group_name] = new_host_group
def _find_id(cls, netprofiler, name):
    # Resolve a host group type name to its numeric id via the API;
    # raise if the appliance knows no type by that name.
    for host_type in netprofiler.api.host_group_types.get_all():
        if host_type['name'] == name:
            return host_type['id']
    raise RvbdException('{0} is not a valid type name '
                        'for this netprofiler'.format(name))
def __init__(self, hostgrouptype, name):
    """New object representing a host group by name.

    The new :class:`HostGroup` will be automatically added to the
    provided :class:`HostGroupType` and can be accessed with::

        host_group_type.groups['group_name']
    """
    # reject non-string names up front (Python 2 basestring covers
    # both str and unicode)
    if not isinstance(name, basestring):
        raise RvbdException("This host group's name is not a string.")
    self.name = name
    self.host_group_type = hostgrouptype
    # register this group with its owning type
    self.host_group_type._add_host_group(self)
def get_interfaces(self, device_ip):
    """ Query netprofiler to attempt to automatically determine
        LAN and WAN interface ids for the given device address.
    """
    cols = self.profiler.get_columns(['interface_dns', 'interface'])

    # run a one-hour traffic summary grouped by interface for the device
    super(WANReport, self).run(
        realm='traffic_summary',
        groupby='ifc',
        columns=cols,
        timefilter=TimeFilter.parse_range('last 1 h'),
        trafficexpr=TrafficFilter('device %s' % device_ip),
        centricity='int',
        resolution='auto',
        sync=True)

    interfaces = self._get_data()

    # classify interfaces by 'lan'/'wan' appearing in their DNS name
    lan = [address for name, address in interfaces if 'lan' in name]
    wan = [address for name, address in interfaces if 'wan' in name]

    if not lan or not wan:
        raise RvbdException('Unable to determine LAN and WAN interfaces for device %s'
                            % device_ip)
    return lan, wan
def download(self, url, path=None, overwrite=False, method='GET',
             extra_headers=None, params=None):
    """Download a file from a remote URI and save it to a local path.

    `url` is the url of the file to download.

    `path` is an optional path on the local filesystem to save the
    downloaded file.  It can be:

        - a complete path
        - a directory

    In the first case the file will have the specified name and
    extension.  In the second case the filename will be retrieved by
    the 'Content-Disposition' HTTP header.  If a path cannot be
    determined, a ValueError is raised.

    `overwrite` if True will save the downloaded file to `path` no
    matter if the file already exists.

    `method` is the HTTP method used for the request.

    `extra_headers` is a dictionary of headers to use for the request.

    `params` is a dictionary of parameters for the request.
    """
    filename = None

    # try to determine the target directory (and filename, if given)
    if path is None:
        directory = tempfile.mkdtemp()
    else:
        if os.path.isdir(path):
            directory = path
        elif path[-1] == os.sep:
            # we got a path which is a directory that doesn't exist
            msg = "{0} directory does not exist.".format(path)
            raise ValueError(msg)
        else:
            # last case, we got a full path of a file
            directory, filename = os.path.split(path)

    # Initiate the request
    # XXX Handle cases where a Keep-Alive header is passed back in
    # Response; include "Connection: Close" as part of the request
    # header, otherwise a Keep-Alive response from the server will hang
    # and block our connection until the system timeout (defaults to
    # 100sec in one implementation)
    extra_headers = self._prepare_headers(extra_headers)
    extra_headers['Connection'] = 'Close'
    r = self._request(method, url, None, params, extra_headers,
                      stream=True)

    try:
        # Check if the user specified a file name
        if filename is None:
            # Retrieve the file name from the HTTP header
            filename = r.headers.get('Content-Disposition', None)
            if filename is not None:
                # BUG FIX: strip surrounding quotes and whitespace that
                # servers commonly include around the filename value;
                # previously the quotes ended up in the saved filename.
                filename = filename.split('=')[1].strip().strip('"\'')

        if not filename:
            raise ValueError("{0} is not a valid path. Specify a full "
                             "path for the file to be created"
                             .format(path))

        # Compose the path
        path = os.path.join(directory, filename)

        # Check if the local file already exists
        if os.path.isfile(path) and not overwrite:
            raise RvbdException('the file %s already exists' % path)

        # Stream the remote file to the local file
        with open(path, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:
                    f.write(chunk)
                    f.flush()
    finally:
        r.close()

    return path
def upload_file(self, path, files, body=None, params=None,
                extra_headers=None, file_headers=None, field_name='file',
                raw_response=False):
    """ Executes a POST to upload a file or files.

    :param path: The full or relative URL of the file upload API
    :param files: Can be a string that is the full path to a file
        to be uploaded OR it can be a tuple/list of strings that
        are each the full path to a file to be uploaded.
    :param body: Optional body. If present must be a dictionary.
    :param params: optional URL params
    :param extra_headers: Optional headers
    :param file_headers: Optional headers to include with the multipart
        file data. Default is {'Expires': '0'}. Pass in an empty dict
        object if you would not like to include any file_headers in the
        multipart data.
    :param field_name: The name of the form field on the destination
        that will receive the posted multipart data. Default is 'file'
    :param raw_response: False (default) results in the function
        returning only the decoded JSON response present in the
        response body. If set to True then the function will return a
        tuple of the decoded JSON body and the full response object.
        Set to True if you want to inspect the result code or response
        headers.
    :return: See 'raw_response' for details on the returned data.
    :raises RvbdException: on an invalid `body`/`files` argument or an
        unreadable file path.
    """
    # the underlying request object will add the correct content type
    # header
    extra_headers = self._prepare_headers(extra_headers)
    extra_headers['Accept'] = 'application/json'

    # by default add a zero expiration header.
    if file_headers is None:
        file_headers = {'Expires': '0'}

    # body must be a dict object for this call.
    if (body is not None) and (not isinstance(body, dict)):
        raise RvbdException("The 'body' argument must either be None or "
                            "a dict")

    def _open_binary(fname):
        # Open one upload source in binary mode, converting IOError
        # into the API's standard exception type.
        try:
            return open(fname, 'rb')
        except IOError:
            raise RvbdException("Could not open '{0}' for read in "
                                "binary mode. Please check path."
                                "".format(fname))

    # Open all of the files, keyed by basename
    xfiles = dict()
    if isinstance(files, str):
        xfiles[basename(files)] = {'file': _open_binary(files)}
    elif isinstance(files, (list, tuple)):
        for fname in files:
            xfiles[basename(fname)] = {'file': _open_binary(fname)}
    else:
        raise RvbdException("upload_file 'files' argument must be a "
                            "string or list type (list, tuple). {0} is "
                            "not a valid files argument."
                            "".format(type(files)))

    try:
        # build the multipart content from the files; each entry is a
        # (name, fileobj[, mimetype[, headers]]) tuple as accepted by
        # the underlying request library
        if len(xfiles) == 1:
            # single file is a dict object
            for f in xfiles:
                mtype, _ = mimetypes.guess_type(f)
                if mtype and file_headers:
                    req_files = {field_name: (f, xfiles[f]['file'],
                                              mtype, file_headers)}
                elif mtype:
                    req_files = {field_name: (f, xfiles[f]['file'],
                                              mtype)}
                else:
                    req_files = {field_name: (f, xfiles[f]['file'])}
        elif len(xfiles) > 1:
            # multiple files is a list
            req_files = list()
            for f in xfiles:
                mtype, _ = mimetypes.guess_type(f)
                if mtype and file_headers:
                    req_files.append((field_name,
                                      (f, xfiles[f]['file'], mtype,
                                       file_headers)))
                elif mtype:
                    req_files.append((field_name,
                                      (f, xfiles[f]['file'], mtype)))
                else:
                    req_files.append((field_name,
                                      (f, xfiles[f]['file'])))
        else:
            raise RvbdException("At least one valid file required. "
                                "Files was: {0}".format(files))

        # send the files
        r = self._request("POST", path, body, params, extra_headers,
                          files=req_files)
    finally:
        # BUG FIX: close the opened file handles; previously they were
        # leaked whether the upload succeeded or failed.
        for entry in xfiles.values():
            entry['file'].close()

    if r.status_code == 204 or len(r.content) == 0:
        data = None  # no data
    else:
        data = json.loads(r.text)

    if raw_response:
        return data, r

    return data
def authenticate(self, auth):
    """Authenticate with device using the defined authentication method.
    This sets up the appropriate authentication headers to access
    restricted resources.

    `auth` must be an instance of either UserAuth or OAuth."""
    assert auth is not None

    self.auth = auth
    self._detect_auth_methods()

    if self._supports_auth_oauth and Auth.OAUTH in self.auth.methods:
        path = '/api/common/1.0/oauth/token'
        # Build an unsigned JWT-style assertion: base64url header
        # declaring no signature algorithm, the access code as the
        # payload, and an empty signature segment.
        assertion = '.'.join([
            base64.urlsafe_b64encode('{"alg":"none"}'),
            self.auth.access_code,
            ''
        ])
        # Random state token, echoed back by the server so we can
        # detect mismatched responses (Python 2 `md5` module).
        state = md5.md5(str(time.time())).hexdigest()
        data = {
            'grant_type': 'access_code',
            'assertion': assertion,
            'state': state
        }
        answer = self.conn.urlencoded_request('POST', path, body=data)
        if answer.json()['state'] != state:
            msg = "Inconsistent state value in OAuth response"
            raise RvbdException(msg)
        token = answer.json()['access_token']
        # One segment means a plain bearer token; three dot-separated
        # segments indicate a signed (JWT) token.
        st = token.split('.')
        if len(st) == 1:
            auth_header = 'Bearer %s' % token
        elif len(st) == 3:
            auth_header = 'SignedBearer %s' % token
        else:
            msg = 'Unknown OAuth response from server: %s' % st
            raise RvbdException(msg)
        self.conn.add_headers({'Authorization': auth_header})
        logger.info('Authenticated using OAUTH2.0')

    elif self._supports_auth_cookie and Auth.COOKIE in self.auth.methods:
        path = '/api/common/1.0/login'
        data = {
            "username": self.auth.username,
            "password": self.auth.password
        }
        body, http_response = self.conn.json_request('POST', path,
                                                     body=data,
                                                     raw_response=True)

        # we're good, set up our http headers for subsequent
        # requests!
        self.conn.cookies = http_response.cookies
        logger.info("Authenticated using COOKIE")

    elif self._supports_auth_basic and Auth.BASIC in self.auth.methods:
        # Use HTTP Basic authentication (Python 2: b64encode on str)
        s = base64.b64encode("%s:%s" % (self.auth.username,
                                        self.auth.password))
        self.conn.add_headers({'Authorization': 'Basic %s' % s})
        logger.info("Authenticated using BASIC")

    else:
        raise RvbdException("No supported authentication methods")
def _verify_cache(self, refetch=False):
    """Retrieve all the possible combinations of groupby, centricity
    and realm using the rule shown under the search_columns method.

    By default, all these permutations will be checked against the
    current local cache file, and any missing keys will be retrieved
    from the server.

    :param bool refetch: will force an api refresh call from the
        machine even if the data can be found in local cache.
    :raises RvbdException: if every triplet fetch failed and no cached
        data is available either.
    """
    columns = list()
    write = False
    have_exception = False

    for realm in self.realms:
        # Some realms only support the host centricity.
        if realm == 'traffic_flow_list' or realm == 'identity_list':
            centricities = ['hos']
        elif realm == 'msq':
            centricities = ['hos']
        else:
            centricities = self.centricities

        for centricity in centricities:
            # Each realm likewise constrains the valid groupbys.
            if realm == 'traffic_summary':
                groupbys = [x for x in self.groupbys.values()
                            if x not in ['thu', 'slm']]
            elif 'time_series' in realm:
                groupbys = ['tim']
            elif realm == 'identity_list':
                groupbys = ['thu']
            elif realm == 'msq':
                groupbys = ['slm']
            else:
                groupbys = ['hos']

            for groupby in groupbys:
                _hash = make_hash(realm, centricity, groupby)
                if refetch or _hash not in self._columns_file.data:
                    logger.debug('Requesting columns for triplet: '
                                 '%s, %s, %s' % (realm, centricity,
                                                 groupby))
                    try:
                        api_call = self.api.report.columns(
                            realm, centricity, groupby)
                    except RvbdHTTPException as e:
                        # BUG FIX: message fragments previously joined
                        # without a space ("columnsfor triplet").
                        logger.warning('Exception raised fetching '
                                       'columns for triplet: '
                                       '{0}, {1}, {2} with '
                                       'message {3}'.format(
                                           realm, centricity,
                                           groupby, e.message))
                        have_exception = True
                        continue

                    # generate Column objects from json
                    api_columns = self._gencolumns(api_call)

                    # compare against objects we've already retrieved
                    existing = [c for c in columns if c in api_columns]
                    new_columns = [c for c in api_columns
                                   if c not in existing]
                    columns.extend(new_columns)

                    # add them to data, preserving existing objects
                    self._columns_file.data[_hash] = (existing +
                                                      new_columns)
                    write = True

    if write:
        self._columns_file.version = _constants.CACHE_VERSION
        self._columns_file.write()
    elif have_exception:
        logger.warning('_verify_cache: Some realm, centricity, '
                       'and groupby triplets failed.')
        if not self._columns_file.data:
            # BUG FIX: message fragments previously joined without
            # spaces ("bothcached", "checkNetProfiler").
            raise RvbdException("_verify_cache failed to collect both "
                                "cached and live data. Please check "
                                "NetProfiler health")