Example #1
    def put(self, basekey: str, value: object):

        if isinstance(value, dict):
            # flatten nested dicts into slash-separated subkeys under basekey
            for subkey, subvalue in flatten(value, separator='/').items():
                self._put_encoded(os.path.join(basekey, str(subkey)), subvalue)
        else:
            self._put_encoded(basekey, value)
Example #2
def write_feature_in_graphite_format(iostream, namespace, timestamp,
                                     feature_key, feature_val, feature_type):
    """
    Write a feature in graphite format into iostream. The graphite format
    looks like this, one line per metric value:

        [namespace].[feature_key].[metric] [value] [timestamp]\r\n
        [namespace].[feature_key].[metric] [value] [timestamp]\r\n
        [namespace].[feature_key].[metric] [value] [timestamp]\r\n

    This function converts a feature into that string and writes it into
    the iostream.

    :param namespace: Frame namespace for this feature
    :param timestamp: From frame metadata, fmt: %Y-%m-%dT%H:%M:%S%z
    :param feature_type: type of the feature (not used when formatting)
    :param feature_key: key identifying the feature within the namespace
    :param feature_val: the (possibly nested) feature value to flatten
    :param iostream: a CStringIO used to buffer the formatted features.
    :return: None
    """
    # Convert e.g. 2017-02-07T13:20:15-0500 to 1486491615 (epoch seconds).
    # For Python >= 3.2 the following works:
    #     time.strptime(timestamp, '%Y-%m-%dT%H:%M:%S%z')
    # but earlier Python versions don't respect the %z timezone info, so the
    # timezone conversion is skipped here, assuming the timestamp in the
    # metadata was created in the same timezone used for this reverse step.

    timestamp = time.mktime(time.strptime(timestamp[:-5], '%Y-%m-%dT%H:%M:%S'))

    items = flatten(feature_val).items()
    if isinstance(namespace, dict):
        namespace = json.dumps(namespace)
    else:
        namespace = namespace.replace('/', '.')

    for (metric, value) in items:
        try:
            # Only emit values that we can cast as floats
            value = float(value)
        except (TypeError, ValueError):
            continue

        metric = metric.replace('(', '_').replace(')', '')
        metric = metric.replace(' ', '_').replace('-', '_')
        metric = metric.replace('/', '_').replace('\\', '_')

        feature_key = feature_key.replace('_', '-')
        if 'cpu' in feature_key or 'memory' in feature_key:
            metric = metric.replace('_', '-')
        if 'if' in metric:
            metric = metric.replace('_tx', '.tx')
            metric = metric.replace('_rx', '.rx')
        if feature_key == 'load':
            feature_key = 'load.load'
        feature_key = feature_key.replace('/', '$')

        tmp_message = '%s.%s.%s %f %d\n' % (namespace, feature_key, metric,
                                            value, timestamp)
        iostream.write(tmp_message)
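
The docstring above describes one graphite line per flattened metric. The sketch below illustrates just that flattening step, assuming flatten() produces dotted keys the way morph.flatten does in the test_flatten example further down; the namespace, feature key, and nested feature value are hypothetical.

import io
import time

import morph

namespace, feature_key = 'host1', 'cpu-0'               # hypothetical names
feature_val = {'cpu': {'user': 12.5, 'system': 3.25}}   # hypothetical nested feature
timestamp = int(time.time())

buf = io.StringIO()
for metric, value in morph.flatten(feature_val).items():
    # nested keys become dotted metric names, e.g. 'cpu.user'
    buf.write('%s.%s.%s %f %d\n' % (namespace, feature_key, metric,
                                    value, timestamp))

print(buf.getvalue())
# host1.cpu-0.cpu.user 12.500000 <timestamp>
# host1.cpu-0.cpu.system 3.250000 <timestamp>   (order may vary)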
Example #3
 def runwith(self, *interactions, **kw):
   confpath = kw.pop('confpath', None)
   interactions = morph.flatten(interactions)
   confpath = confpath or 'secpass.ini'
   options  = aadict(config=confpath)
   self.iostack = [i.clone() for i in interactions]
   config.cmd_config(options)
   self.assertEqual(self.iostack, [])
   with open(confpath, 'rb') as fp:
     return fp.read()
Example #4
 def _apiError(self, res):
   ret = str(res.status_code) + ': '
   # todo: handle case where `res.content_type` is not application/json...
   res = res.json()
   ret += res['message']
   if 'field' in res:
     ret += ' (' + ', '.join([
         key + ': ' + value
         for key, value in morph.flatten(res['field']).items()]) + ')'
   return ret
Example #5
    def _save_service_config(self, app, component, data):
        # fetch service definition with minimum structure
        svc = self._fetch_service_config(app)

        # always assume a .deis.io ending
        component = "%s.deis.io/" % component

        # add component to data and flatten
        data = {"%s%s" % (component, key): value for key, value in list(data.items())}
        svc['metadata']['annotations'].update(morph.flatten(data))

        # Update the k8s service for the application with new domain information
        self._scheduler._update_service(app, app, svc)
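
The comments above describe prefixing each key with the component name and flattening the result before merging it into the service annotations. A minimal sketch of that transformation, assuming morph.flatten joins nested keys with dots as in the test_flatten example below; the component and data values are hypothetical.

import morph

component = "%s.deis.io/" % "router"            # hypothetical component
data = {"maintenance": {"enabled": "true"}}     # hypothetical settings

data = {"%s%s" % (component, key): value for key, value in data.items()}
print(morph.flatten(data))
# {'router.deis.io/maintenance.enabled': 'true'}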
Example #6
 def test_flatten(self):
     self.assertEqual(
         morph.flatten([
             1, [2, [3, 'abc', 'def', {
                 'foo': ['zig', ['zag', 'zog']]
             }], 4]
         ]), [1, 2, 3, 'abc', 'def', {
             'foo': ['zig', ['zag', 'zog']]
         }, 4])
     self.assertEqual(morph.flatten({'a': {'b': 'c'}}), {'a.b': 'c'})
     self.assertEqual(
         morph.flatten({'a': {
             'b': 1,
             'c': [2, {
                 'd': 3,
                 'e': 4
             }]
         }}), {
             'a.b': 1,
             'a.c[0]': 2,
             'a.c[1].d': 3,
             'a.c[1].e': 4
         })
     self.assertEqual(
         morph.flatten({'a': {
             'b': [[1, 2], [3, {
                 'x': 4,
                 'y': 5
             }, 6]]
         }}), {
             'a.b[0][0]': 1,
             'a.b[0][1]': 2,
             'a.b[1][0]': 3,
             'a.b[1][1].x': 4,
             'a.b[1][1].y': 5,
             'a.b[1][2]': 6,
         })
Example #7
def write_feature_in_graphite_format(iostream, namespace,
                                     feature_key, feature_val,
                                     feature_type):
    """
    Write a feature in graphite format into iostream. The graphite format
    looks like this, one line per metric value:

        [namespace].[feature_key].[metric] [value] [timestamp]\r\n
        [namespace].[feature_key].[metric] [value] [timestamp]\r\n
        [namespace].[feature_key].[metric] [value] [timestamp]\r\n

    This function converts a feature into that string and writes it into
    the iostream.

    :param namespace: Frame namespace for this feature
    :param feature_type:
    :param feature_key:
    :param feature_val:
    :param iostream: a CStringIO used to buffer the formatted features.
    :return: None
    """
    timestamp = time.time()
    items = flatten(feature_val).items()
    namespace = namespace.replace('/', '.')

    for (metric, value) in items:
        try:
            # Only emit values that we can cast as floats
            value = float(value)
        except (TypeError, ValueError):
            continue

        metric = metric.replace('(', '_').replace(')', '')
        metric = metric.replace(' ', '_').replace('-', '_')
        metric = metric.replace('/', '_').replace('\\', '_')

        feature_key = feature_key.replace('_', '-')
        if 'cpu' in feature_key or 'memory' in feature_key:
            metric = metric.replace('_', '-')
        if 'if' in metric:
            metric = metric.replace('_tx', '.tx')
            metric = metric.replace('_rx', '.rx')
        if feature_key == 'load':
            feature_key = 'load.load'
        feature_key = feature_key.replace('/', '$')

        tmp_message = '%s.%s.%s %f %d\r\n' % (namespace, feature_key,
                                              metric, value, timestamp)
        iostream.write(tmp_message)
Example #8
 def part_of(cls, a: str, b: str) -> bool:
     """
     Returns whether word a is semantically a part of b.
     :param a: a word
     :param b: a word
      :return: True if a definition of b matches the "part of" pattern for a
               synonym of a, otherwise False
     """
     definition = cls.parser.fetch(b)
     a = cls.get_synonyms(a)
     synonyms = '|'.join(a)
     try:
         definitions = [d['definitions'] for d in definition]
         definitions = flatten(definitions)
         definitions = [d['text'] for d in definitions]
         definitions = flatten(definitions)
     except IndexError:
         return False
     for d in definitions:
         if re.search(
                 f'(?:parts? of|elements? of|portions? of|in an?).*(?:{synonyms})',
                 d) is not None:
             return True
     return False
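
The docstring above says part_of() searches the fetched definitions of b for a "part of" phrase followed by a synonym of a. The snippet below shows only that regex check, with hypothetical synonyms and definition strings.

import re

synonyms = '|'.join(['wheel', 'tyre'])   # hypothetical synonyms of `a`
pattern = f'(?:parts? of|elements? of|portions? of|in an?).*(?:{synonyms})'

print(bool(re.search(pattern, 'A frame holding parts of a wheel together.')))  # True
print(bool(re.search(pattern, 'A small furry animal kept as a pet.')))          # False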
Example #9
    def clients_set(self, catalog):
        bulk_body = []
        for c in catalog:
            bulk_body.append(json.dumps({"index": {}}))
            bulk_body.append(json.dumps(morph.flatten(c)))

        self.elastic.delete_by_query(index=DB._CATALOG_IDX,
                                     doc_type="clients",
                                     body={"query": {
                                         "match_all": {}
                                     }})

        self.elastic.bulk(index=DB._CATALOG_IDX,
                          doc_type="clients",
                          body="\n".join(bulk_body),
                          refresh="true")
Example #10
    def _save_service_config(self, app, component, data):
        # fetch service definition with minimum structure
        svc = self._fetch_service_config(app)

        # always assume a .deis.io ending
        component = "%s.deis.io/" % component

        # add component to data and flatten
        data = {"%s%s" % (component, key): value for key, value in list(data.items())}
        svc['metadata']['annotations'].update(morph.flatten(data))

        # Update the k8s service for the application with new service information
        try:
            self._scheduler.update_service(app, app, svc)
        except KubeException as e:
            raise ServiceUnavailable('Could not update Kubernetes Service {}'.format(app)) from e
Example #11
    def _save_service_config(self, app, component, data):
        # fetch service definition with minimum structure
        svc = self._fetch_service_config(app)

        # always assume a .deis.io ending
        component = "%s.deis.io/" % component

        # add component to data and flatten
        data = {"%s%s" % (component, key): value for key, value in list(data.items())}
        svc['metadata']['annotations'].update(morph.flatten(data))

        # Update the k8s service for the application with new service information
        try:
            self._scheduler.svc.update(app, app, svc)
        except KubeException as e:
            raise ServiceUnavailable('Could not update Kubernetes Service {}'.format(app)) from e
Example #12
 def _sanitize(self, text):
      '''
      Emulates behavior where LUIS lowercases the input and pads spaces for
      "special" chars.
      '''
     CHARS = '"\',.-()'  # based on observation and may not be exhaustive
     if not isinstance(text, (str, unicode)):
         text = unicode(text)
     text = text.lower().strip()
     # todo: improve this poor man's way of tokenizing
     t = text.split(' ')
     for idx, val in enumerate(t):
         for c in CHARS:
             if c in val:
                 val = val.replace(c, ' %s ' % c)  # pad c with spaces
                 t[idx] = val.split()
     return ' '.join(morph.flatten(t))
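
After padding, some entries of t have been replaced by lists of sub-tokens, so the final join relies on morph.flatten collapsing nested lists back into one flat token list (the list-flattening behavior shown in test_flatten above). A hypothetical example of that last step:

import morph

tokens = ['hello', [',', 'world'], ['(', 'again', ')']]  # hypothetical padded tokens
print(' '.join(morph.flatten(tokens)))
# hello , world ( again )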
Example #13
    def get_flatten_domain_dict(self, _dir):
        cg_domain_json_path = os.path.join(
            _dir, self.cg_root_config_d["domain_config"])

        try:
            with open(cg_domain_json_path) as _f:
                _cg_domain_cfg = json.loads(_f.read())

        except ValueError:  # includes simplejson.decoder.JSONDecodeError
            print('Decoding JSON has failed - %s' % cg_domain_json_path)
            return False
        except (IOError, OSError):
            print('File open failed - %s' % cg_domain_json_path)
            return False

        self.flat_domain_d = flatten(_cg_domain_cfg)
        #    print self.flat_domain_d
        return True
Example #14
    def get_flatten_default_config(self, _dir):
        cg_dflt_json_path = os.path.join(
            _dir, self.cg_root_config_d["default_config"])

        try:
            with open(cg_dflt_json_path) as _f:
                _dflt_cfg = json.loads(_f.read(),
                                       object_pairs_hook=OrderedDict)

        except ValueError:  # includes simplejson.decoder.JSONDecodeError
            print('Decoding JSON has failed - %s' % cg_dflt_json_path)
            return False
        except (IOError, OSError):
            print('File open failed - %s' % cg_dflt_json_path)
            return False

        self.nonflat_dflt_config_d = _dflt_cfg
        self.flat_dflt_config_d = flatten(_dflt_cfg)
        #       print self.flat_dflt_config_d
        return True
Example #15
    def metrics_add(self, doc_type, data):
        if doc_type not in ["east-west", "north-south"]:
            raise ValueError("Wrong doc type")

        bulk_body = []
        for d in data:
            bulk_body.append(json.dumps({"index": {}}))
            bulk_body.append(json.dumps(morph.flatten(d)))

        # NOTE(boris-42): We should analyze Elastic response here.
        r = self.elastic.bulk(index=DB._DATA_ALIAS,
                              doc_type=doc_type,
                              body="\n".join(bulk_body))

        results = {}
        for it in r["items"]:
            k = it["index"]["status"]
            results.setdefault(k, 0)
            results[k] += 1

        LOG.info("Metrics bulk insert result: %s" % results)
        return results
Example #16
    def _pack(obj):

        import morph

        return sorted(
            ["%s=%s" % (k, v) for k, v in morph.flatten(obj).items()])
Example #17
    def _pack(obj):

        import morph

        return sorted(["%s=%s" % (k, v)
                       for k, v in morph.flatten(obj).items()])
Example #18
def flatten_settings(settings, prefix=''):
    flat_settings = morph.flatten(settings)
    flat_settings = {prefix + k: v for k, v in flat_settings.items()}
    return flat_settings