Code example #1
def CaptureJobService(appresponse):
    """Factory to determine appropriate CaptureJob class"""

    version = APIVersion(appresponse.versions['npm.packet_capture'])
    if version < APIVersion('2.0'):
        return PacketCapture10(appresponse)
    else:
        return PacketCapture20(appresponse)
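
A minimal usage sketch of this factory, assuming an already-connected AppResponse object and that both PacketCapture classes expose the same job-listing interface:

# Hypothetical usage sketch: `ar` is a placeholder for a connected AppResponse
# object. The factory hides the 1.x/2.x split, so callers stay version-agnostic.
capture = CaptureJobService(ar)
for job in capture.get_jobs():   # get_jobs() assumed common to both classes
    print(job.name)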
Code example #2
    def _detect_auth_methods(self):
        if self.api_version >= APIVersion("4.0"):
            # 4.0 netshark supports the standard GL7 method
            return Service._detect_auth_methods(self)

        self._supports_auth_basic = self.api_version >= APIVersion("3.0")
        self._supports_auth_cookie = self.api_version >= APIVersion("3.1")
        self._supports_auth_oauth = False
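
These version gates rely on APIVersion comparing numerically rather than lexically; a small illustrative sketch of the assumed semantics:

# Illustrative sketch of the comparison semantics the gates above assume:
# APIVersion is expected to parse major/minor components as numbers.
assert APIVersion("3.1") >= APIVersion("3.0")
assert APIVersion("10.0") > APIVersion("9.0")  # a plain string compare would get this wrong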
Code example #3
    def __init__(self, host, port=None, auth=None):
        """Establishes a connection to a NetProfiler appliance.

        :param str host: name or IP address of the NetProfiler to
            connect to

        :param int port: TCP port on which the NetProfiler appliance
            listens.  If this parameter is not specified, the function will try
            to automatically determine the port.

        :param auth: defines the authentication method and credentials
            to use to access the NetProfiler.  It should be an instance of
            :py:class:`UserAuth<steelscript.common.service.UserAuth>` or
            :py:class:`OAuth<steelscript.common.service.OAuth>`

        See the base :py:class:`Service<steelscript.common.service.Service>` class
        for more information about additional functionality supported.
        """
        super(NetProfiler, self).__init__("profiler",
                                          host,
                                          port,
                                          auth=auth,
                                          versions=[APIVersion("1.0")])

        self.api = _api1.Handler(self)

        self.groupbys = DictObject.create_from_dict(_constants.groupbys)
        self.realms = _constants.realms
        self.centricities = _constants.centricities

        self._info = None

        # checking if the profiler supports 1.2
        # if yes, then use column dsc
        # otherwise, use column qos
        if (self.supported_versions is None
                or APIVersion("1.2") in self.supported_versions):
            _key, _value = ('dsc', 'dsc')
        else:
            _key, _value = ('qos', 'qos')
        self.groupbys[_key] = _value

        self._load_file_caches()
        self.columns = ColumnContainer(self._unique_columns())
        self.colnames = set(c.key for c in self.columns)

        self.areas = AreaContainer(self._areas_dict.items())
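
A connection sketch following the docstring above; the hostname and credentials are placeholders and the NetProfiler import path is assumed:

# Hypothetical connection sketch (placeholder host and credentials).
from steelscript.common.service import UserAuth
from steelscript.netprofiler.core import NetProfiler   # import path assumed

p = NetProfiler('netprofiler.example.com',
                auth=UserAuth('admin', 'password'))
print(p.supported_versions)      # API versions negotiated with the appliance
print(sorted(p.colnames)[:5])    # a few of the column keys loaded at init time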
Code example #4
    def _get_supported_versions(self):
        try:
            # First try the standard GL7 method
            versions = super(NetShark, self)._get_supported_versions()

            if versions:
                return versions
        except RvbdHTTPException as e:
            if e.status != 404:
                raise

        # older sharks export the protocol info on /protocol_info, and
        # return a 401 unauthorized for any other unrecognized URLs
        res = self.conn.json_request('GET', '/protocol_info')
        if res.status != 200 \
               or res.getheader('content-type') != 'text/xml':
            # any other non-ok status probably means we are
            # not talking to a netshark
            return None

        from xml.etree import ElementTree
        tree = ElementTree.fromstring(res.read())
        if tree.tag != 'ProtocolInfo':
            # probably some non-netshark http server
            return None

        return [APIVersion(tree.get('Version'))]
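
Reduced to its essentials, the method above is a probe-then-fall-back pattern; a hedged sketch with placeholder helper functions:

# Hypothetical distilled form of the probe above: try the modern resource first,
# treat a 404 as "resource absent", and only then query the legacy URL.
def probe_supported_versions(conn):
    try:
        versions = fetch_common_services(conn)    # placeholder modern probe
        if versions:
            return versions
    except RvbdHTTPException as e:
        if e.status != 404:
            raise                                 # real errors still propagate
    return fetch_legacy_protocol_info(conn)       # placeholder legacy fallback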
Code example #5
File: service.py  Project: mezgerj/steelscript
    def _get_supported_versions(self):
        """Get the common list of services and versions supported."""
        # uses the GL7 'services' resource.
        path = '/api/common/1.0/services'
        services = self.conn.json_request('GET', path)

        for service in services:
            if service['id'] == self.service:
                return [APIVersion(v) for v in service['versions']]

        return None
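
For context, a hedged sketch of the payload shape this parser expects from the common 'services' resource, with an equivalent standalone lookup; the concrete service ids and version strings are illustrative:

# Illustrative payload shape only; the field names come from the code above,
# the concrete ids and version strings are made up for the sketch.
services = [
    {'id': 'npm.packet_capture', 'versions': ['1.0', '2.0']},
    {'id': 'npm.clips', 'versions': ['1.0']},
]
wanted = 'npm.packet_capture'
versions = next(([APIVersion(v) for v in s['versions']]
                 for s in services if s['id'] == wanted), None)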
Code example #6
    def main(self):

        # handle new packet capture version
        version = APIVersion(self.appresponse.versions['npm.packet_capture'])
        if version < APIVersion('2.0'):
            ifg = IFG('mifg_id', lambda job: job.data.config.mifg_id,
                      self.appresponse.capture.get_mifgs)
        else:
            ifg = IFG('vifgs', lambda job: job.data.config.vifgs,
                      self.appresponse.capture.get_vifgs)

        # Show Interfaces and VIFGs (skip if MIFG appliance)
        if ifg.type == 'vifgs':
            # Show interfaces
            headers = [
                'name', 'description', 'status', 'bytes_total',
                'packets_dropped', 'packets_total'
            ]
            data = []
            for iface in self.appresponse.capture.get_interfaces():
                data.append([
                    iface.name,
                    iface.data.config.description,
                    iface.status,
                    iface.stats.bytes_total.total,
                    iface.stats.packets_dropped.total,
                    iface.stats.packets_total.total,
                ])
            self.console('Interfaces', data, headers)

            headers = [
                'id', 'name', 'enabled', 'filter', 'bytes_received',
                'packets_duped', 'packets_received'
            ]
            data = []
            for vifg in self.appresponse.capture.get_vifgs():
                data.append([
                    vifg.data.id,
                    vifg.data.config.name,
                    vifg.data.config.enabled,
                    vifg.data.config.filter,
                    vifg.data.state.stats.bytes_received.total,
                    vifg.data.state.stats.packets_duped.total,
                    vifg.data.state.stats.packets_received.total,
                ])
            self.console('VIFGs', data, headers)

        # Show capture jobs
        headers = [
            'id', 'name', ifg.type, 'filter', 'state', 'start_time',
            'end_time', 'size'
        ]
        data = []
        for job in self.appresponse.capture.get_jobs():
            data.append([
                job.id, job.name,
                ifg.get_id(job),
                getattr(job.data.config, 'filter',
                        None), job.data.state.status.state,
                job.data.state.status.packet_start_time,
                job.data.state.status.packet_end_time,
                job.data.state.status.capture_size
            ])
        self.console('Capture Jobs', data, headers)

        # Show clips

        headers = ['id', 'job_id', 'start_time', 'end_time', 'filters']
        data = []
        for clip in self.appresponse.clips.get_clips():
            data.append([
                clip.id, clip.data.config.job_id, clip.data.config.start_time,
                clip.data.config.end_time,
                getattr(clip.data.config, 'filters', dict(items=None))['items']
            ])
        self.console('Clips', data, headers)

        # Show files

        headers = [
            'type', 'id', 'link_type', 'format', 'size', 'created', 'modified'
        ]
        data = []
        for obj in self.appresponse.fs.get_files():
            data.append([
                obj.data.type, obj.id, obj.data.link_type, obj.data.format,
                obj.data.size, obj.data.created, obj.data.modified
            ])
        self.console('Uploaded Files/PCAPs', data, headers)
Code example #7
    def supports_version(self, version):
        if isinstance(version, types.StringTypes):
            version = APIVersion(version)
        return version in self.supported_versions
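
A sketch of the kind of guard a caller might build on supports_version(); everything apart from the method itself is a placeholder, and the 1.4 check mirrors the limit handling in the report example later in this section:

# Hypothetical caller-side guard; `profiler` and `query` are placeholders.
if profiler.supports_version('1.4'):
    query['limit'] = 1000
else:
    raise ValueError("'limit' requires NetProfiler API 1.4 or later")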
Code example #8
    def main(self):

        version = APIVersion(self.appresponse.versions['npm.packet_capture'])
        if version < APIVersion('2.0'):
            ifg = IFG('mifg_id',
                      lambda job: job.data.config.mifg_id,
                      self.appresponse.capture.get_mifgs)
        else:
            ifg = IFG('vifgs',
                      lambda job: job.data.config.vifgs,
                      self.appresponse.capture.get_vifgs)

        if self.options.show_ifgs:
            headers = ['id', 'name', 'filter', 'members']
            data = []
            for xifg in ifg.get_items():
                if 'filter' in xifg.data.config:
                    f = xifg.data.config.filter
                else:
                    f = {'value': None}

                fltr = f if f['value'] else 'None'

                if 'members' in xifg.data.config:
                    members = xifg.data.config.members
                else:
                    members = xifg.data.config.interfaces

                data.append([xifg.id, xifg.name,
                             fltr,
                             members])

            Formatter.print_table(data, headers)

        elif self.options.show_jobs:
            headers = ['id', 'name', ifg.type, 'filter', 'state',
                       'start', 'end', 'size']
            data = []
            for job in self.appresponse.capture.get_jobs():
                data.append([job.id, job.name,
                             ifg.get_id(job),
                             getattr(job.data.config, 'filter',
                                     dict(string=None))['string'],
                             job.status,
                             job.data.state.status.packet_start_time,
                             job.data.state.status.packet_end_time,
                             job.data.state.status.capture_size])
            Formatter.print_table(data, headers)

        else:

            if version < APIVersion('2.0'):
                config = dict(name=self.options.jobname,
                              mifg_id=int(self.options.ifgs))
            else:
                ifgs = [int(v) for v in self.options.ifgs.split(',')]
                config = dict(name=self.options.jobname,
                              enabled=True,
                              vifgs=ifgs)

            if self.options.filter:
                fltr = dict(type=self.options.filter_type,
                            string=self.options.filter)
                config['filter'] = fltr

            self.appresponse.capture.create_job(config)
            print("Successfully created packet capture job {}"
                  .format(self.options.jobname))
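
For reference, a sketch of the two job-creation payloads the branches above produce; only the field names come from the code, the values and the filter expression are placeholders:

# Hypothetical payloads built by the two branches above (placeholder values).
config_pre_2_0 = {'name': 'my-job', 'mifg_id': 1000}
config_2_0_plus = {'name': 'my-job', 'enabled': True, 'vifgs': [1001, 1002],
                   'filter': {'type': 'BPF', 'string': 'port 443'}}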
Code example #9
    def supports_version(self, version):
        if isinstance(version, (str,)):
            version = APIVersion(version)
        return version in self.supported_versions
Code example #10
    def run(self,
            realm,
            groupby="hos",
            columns=None,
            sort_col=None,
            timefilter=None,
            trafficexpr=None,
            host_group_type="ByLocation",
            resolution="auto",
            centricity="hos",
            area=None,
            data_filter=None,
            sync=True,
            query_columns_groupby=None,
            query_columns=None,
            limit=None,
            custom_criteria=None):
        """
        :param str realm: type of query; this is set automatically by subclasses

        :param str groupby: sets the way in which data should be grouped
            (use netprofiler.groupby.*)

        :param list columns: list of key and value columns to retrieve
            (use netprofiler.columns.*)

        :param sort_col: :class:`Column` reference to sort by

        :param timefilter: range of time to query,
            instance of :class:`TimeFilter`

        :param trafficexpr: instance of :class:`TrafficFilter`

        :param str host_group_type: sets the host group type to use
            when the groupby is related to groups
            (such as 'group' or 'peer_group').

        :param str resolution: data resolution, such as 1min or 15min;
            defaults to 'auto'

        :param str centricity: 'hos' for host-based counts or 'int' for
            interface-based counts; only affects directional columns

        :param str area: sets the appropriate scope for the report

        :param str data_filter: deprecated filter to run against report data

        :param bool sync: if True, poll for status until the report is complete

        :param list query_columns_groupby: the groupby for time columns

        :param list query_columns: list of unique values associated with
            query_columns_groupby

        :param int limit: upper limit on the number of rows of result data.
            By default NetProfiler returns at most 10,000 rows; this argument
            can raise that limit up to 1,000,000 if needed.
        """

        # query related parameters
        self.realm = realm
        self.groupby = groupby or 'hos'
        self.columns = self.profiler.get_columns(columns, self.groupby)
        self.centricity = centricity
        self.host_group_type = host_group_type
        self.area = area

        query = {
            "realm": self.realm,
            "centricity": self.centricity,
        }

        if realm == 'traffic_time_series':
            # The traffic_time_series realm allows 1 and only 1
            # value column -- but the user may or may not want the time
            # column.  If the time column was specified, drop it from
            # what gets sent in the POST
            non_time_column = (list(
                set(self.columns) - set([self.profiler.columns.key.time]))[0])
            query['columns'] = ([
                x.id for x in self.profiler.get_columns([non_time_column],
                                                        self.groupby)
            ])
        else:
            query['columns'] = [col.id for col in self.columns]

        if self.groupby is not None:
            query["group_by"] = self.groupby

        if sort_col:
            self.sort_col = self.profiler.get_columns([sort_col],
                                                      self.groupby)[0]
        else:
            self.sort_col = None

        if self.sort_col is not None:
            query["sort_column"] = self.sort_col.id

        if self.area is not None:
            query['area'] = self.profiler._parse_area(self.area)

        if self.groupby in ['gro', 'gpp', 'gpr', 'pgp', 'pgr']:
            query['host_group_type'] = self.host_group_type

        if query_columns_groupby is not None:
            query[query_columns_groupby] = query_columns
            query['host_group_type'] = self.host_group_type

        self._limit = limit
        if limit is not None:
            if (APIVersion("1.4") not in self.profiler.supported_versions):
                raise ProfilerException(
                    "'limit' not supported by this NetProfiler API version")

            elif realm.endswith('time_series'):
                raise ProfilerException("'limit' option is not allowed in "
                                        "time series report")

            query['limit'] = limit
        super(SingleQueryReport, self).run(template_id=184,
                                           timefilter=timefilter,
                                           resolution=resolution,
                                           query=query,
                                           trafficexpr=trafficexpr,
                                           data_filter=data_filter,
                                           sync=sync,
                                           custom_criteria=custom_criteria)
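
A hedged invocation sketch for the signature documented above; `report` stands for a SingleQueryReport subclass instance, `p` for a connected NetProfiler, the column handles follow the netprofiler.columns.* convention from the docstring, and the TimeFilter import path is assumed:

# Hypothetical invocation sketch; names below are placeholders as noted above.
from steelscript.common.timeutils import TimeFilter   # import path assumed

report.run(realm='traffic_summary',
           groupby='hos',
           columns=[p.columns.key.host_ip, p.columns.value.avg_bytes],
           sort_col=p.columns.value.avg_bytes,
           timefilter=TimeFilter.parse_range('last 1 hour'),
           limit=100)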
Code example #11
    def main(self, module):

        try:

            # handle new packet capture version
            version = APIVersion(
                self.appresponse.versions['npm.packet_capture'])
            if version < APIVersion('2.0'):
                ifg = IFG('mifg_id', lambda job: job.data.config.mifg_id,
                          self.appresponse.capture.get_mifgs)
            else:
                ifg = IFG('vifgs', lambda job: job.data.config.vifgs,
                          self.appresponse.capture.get_vifgs)

            total = []

            # Show Interfaces and VIFGs (skip if MIFG appliance)
            if ifg.type == 'vifgs':
                # Show interfaces
                headers = [
                    'name', 'description', 'status', 'bytes_total',
                    'packets_dropped', 'packets_total'
                ]
                data = []
                for iface in self.appresponse.capture.get_interfaces():
                    data.append([
                        iface.name,
                        iface.data.config.description,
                        iface.status,
                        iface.stats.bytes_total.total,
                        iface.stats.packets_dropped.total,
                        iface.stats.packets_total.total,
                    ])
                if self.output_file is not None:
                    self.console('Interfaces', data, headers)
                total.append(headers)
                total.append(data)

                headers = [
                    'id', 'name', 'enabled', 'filter', 'bytes_received',
                    'packets_duped', 'packets_received'
                ]
                data = []
                for vifg in self.appresponse.capture.get_vifgs():
                    data.append([
                        vifg.data.id,
                        vifg.data.config.name,
                        vifg.data.config.enabled,
                        vifg.data.config.filter,
                        vifg.data.state.stats.bytes_received.total,
                        vifg.data.state.stats.packets_duped.total,
                        vifg.data.state.stats.packets_received.total,
                    ])
                if self.output_file is not None:
                    self.console('VIFGs', data, headers)
                total.append(headers)
                total.append(data)

            # Show capture jobs
            headers = [
                'id', 'name', ifg.type, 'filter', 'state', 'start_time',
                'end_time', 'size'
            ]
            data = []
            for job in self.appresponse.capture.get_jobs():
                data.append([
                    job.id, job.name,
                    ifg.get_id(job),
                    getattr(job.data.config, 'filter',
                            None), job.data.state.status.state,
                    job.data.state.status.packet_start_time,
                    job.data.state.status.packet_end_time,
                    job.data.state.status.capture_size
                ])
            if self.output_file is not None:
                self.console('Capture Jobs', data, headers)
            total.append(headers)
            total.append(data)

            # Show clips

            headers = ['id', 'job_id', 'start_time', 'end_time', 'filters']
            data = []
            for clip in self.appresponse.clips.get_clips():
                data.append([
                    clip.id, clip.data.config.job_id,
                    clip.data.config.start_time, clip.data.config.end_time,
                    getattr(clip.data.config, 'filters',
                            dict(items=None))['items']
                ])
            if self.output_file is not None:
                self.console('Clips', data, headers)
            total.append(headers)
            total.append(data)

            # Show files

            headers = [
                'type', 'id', 'link_type', 'format', 'size', 'created',
                'modified'
            ]
            data = []
            for obj in self.appresponse.fs.get_files():
                data.append([
                    obj.data.type, obj.id, obj.data.link_type, obj.data.format,
                    obj.data.size, obj.data.created, obj.data.modified
                ])
            if self.output_file is not None:
                self.console('Uploaded Files/PCAPs', data, headers)
            total.append(headers)
            total.append(data)

            if self.output_file is None:
                module.exit_json(changed=False, output=total)
            else:
                result = "Successfully wrote output to '{}'".format(
                    self.output_file)
                module.exit_json(changed=False, msg=result)

        except RvbdHTTPException as e:
            results = "Error getting list of sources from AppResponse appliance"
            module.fail_json(changed=False, msg=results, reason=str(e))
Code example #12
    def __init__(self, host, port=None, auth=None, force_version=None):
        """Establishes a connection to a NetShark appliance.

        :param str host: the name or IP address of the NetShark to connect to

        :param int port: the TCP port on which the NetShark appliance
            listens.  If this parameter is not specified, the function
            will try to automatically determine the port.

        :param auth: defines the
            authentication method and credentials to use to access the
            NetShark.  It should be an instance of
            :class:`UserAuth <steelscript.common.service.UserAuth>` or
            :class:`OAuth <steelscript.common.service.OAuth>`.

        :param str force_version: the API version to use when
            communicating.  If unspecified, this will use the latest
            version supported by both this implementation and the
            NetShark appliance.

        See the base :class:`Service <steelscript.common.service.Service>`
        class for more information about additional functionality supported.
        """

        if force_version is not None:
            if force_version not in API_TABLE:
                raise NetSharkException("API version %s unsupported" %
                                        force_version)
            versions = [APIVersion(force_version)]
        else:
            versions = [APIVersion(NetSharkAPIVersions.CURRENT)] + \
                       [APIVersion(v) for v in NetSharkAPIVersions.LEGACY]

        super(NetShark, self).__init__("shark",
                                       host,
                                       port=port,
                                       auth=auth,
                                       versions=versions)

        self.api = API_TABLE[str(self.api_version)](self)
        self.classes = CLASS_TABLE[str(self.api_version)]()
        # the Settings class may not be implemented for this API version
        try:
            self.settings = self.classes.Settings(self)
        except NotImplementedError:
            self.settings = Classes()

        # Get the server info
        self.serverinfo = self.get_serverinfo()

        self.views = {}
        self._interfaces = None
        self.xtfields = {}

        def _get_fields():
            self._fetch_extractor_fields()
            return self.xtfields.items()

        def _set_fields(f):
            self.columns = f

        self.columns = ColumnProxy(_get_fields, _set_fields)

        self._capture_jobs_cache = None
        self._traceclips_cache = None
        self._tracefiles_cache = None
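
A connection sketch mirroring the docstring, including the force_version path; the hostname, credentials, and pinned version are placeholders and the NetShark import path is assumed:

# Hypothetical connection sketch; the pinned version string must be a key of
# API_TABLE in the installed package, '5.0' is only illustrative.
from steelscript.common.service import UserAuth
from steelscript.netshark.core import NetShark   # import path assumed

shark = NetShark('netshark.example.com',
                 auth=UserAuth('admin', 'password'),
                 force_version='5.0')
print(shark.api_version, shark.serverinfo)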