Example 1
    def __init__(self, connection):
        """Initialize AppResponse object.

        :param connection: Connection object to the AppResponse appliance.
            Should be an instance of
            :py:class:`Connection<steelscript.common.connection.Connection>`.
        """
        # Local cache directory for AppResponse support files.
        self.ss_dir = SteelScriptDir('AppResponse', 'files')
        self.connection = connection
Example 2
    def _load_file_caches(self):
        """Populate the column and area caches from local storage.

        Only locally stored data is unrolled here -- no remote calls are
        made for column data at this stage.
        """
        self._fs_data = SteelScriptDir('NetProfiler', 'data')

        # Column cache is keyed by appliance version.
        self._columns_file = self._fs_data.get_data(
            'columns-' + self.version + '.pcl')
        # Short-circuit: only check the version field when data exists.
        stale = (self._columns_file.data is None
                 or self._columns_file.version < _constants.CACHE_VERSION)
        if stale:
            # Cache predates CACHE_VERSION in our config -- it must be an
            # *old* format, so start over with an empty cache.
            self._columns_file.data = dict()

        # Areas are cheap enough to fetch once and persist as JSON.
        self._areas_file = self._fs_data.get_config(
            'areas-' + self.version + '.json')
        if self._areas_file.data is None:
            self._areas_file.data = self.api.report.areas()
            self._areas_file.write()

        self._verify_cache()
        self._areas_dict = dict(self._genareas(self._areas_file.data))
    def _load_sources(self):
        """Get the names and granularities of sources. The hierarchy of the
        data looks like below:

            { "source1" : { "name": string,
                            "filters_on_metrics": boolean,
                            "columns": [source_column],
                            "granularities": [string],
                          }
              ...
            }

        Results are cached on disk per service/version, so the appliance
        is only queried when no valid local cache exists.
        """
        ss_dir = SteelScriptDir('AppResponse', 'files')

        for svc in [PACKETS_REPORT_SERVICE_NAME, GENERAL_REPORT_SERVICE_NAME]:
            svc_version = self.appresponse.versions[svc]
            sw_version = (self.appresponse.get_info()['sw_version'].replace(
                ' ', ''))
            # Cache file name encodes service + versions so an upgrade
            # naturally invalidates stale caches.
            sources_filename = ('{}-sources-{}-{}.pcl'.format(
                svc, svc_version, sw_version))
            sources_file = ss_dir.get_data(sources_filename)

            sources_file.read()

            if not sources_file.data:
                svcdef = self.appresponse.find_service(svc)

                # sources is a list of dictionaries
                sources = svcdef.bind('sources').execute('get').data['items']

                # the whole set of sources for current service
                all_sources = {}

                for source in sources:
                    # Re-key columns by their id, sorted, for fast lookup.
                    cols = source['columns']
                    source['columns'] = \
                        OrderedDict(sorted(zip(map(lambda x: x['id'], cols),
                                               cols)))
                    source['filters_on_metrics'] = \
                        source['capabilities']['filters_on_metrics']
                    if 'granularities' not in source:
                        source['granularities'] = None

                    all_sources[source['name']] = source

                    # Expose only sources recognized by our settings.
                    if source['name'] in report_source_to_groups:
                        self._sources[source['name']] = source

                # source_file writes the whole set of sources to disk
                sources_file.data = all_sources
                sources_file.write()
                logger.debug(
                    "Wrote sources data into {}".format(sources_filename))
            else:
                logger.debug(
                    "Loading sources data from {}".format(sources_filename))
                # Only load valid sources based on settings.
                # BUG FIX: dict.iteritems() is Python 2 only; use items()
                # so this also runs under Python 3.
                for k, v in sources_file.data.items():
                    if k in report_source_to_groups:
                        self._sources[k] = v