def transform_jsonlog(self):
    """Flatten a JSON log file into single-line plain-text records.

    Reads ``self.original_file_path`` line by line, strips any Python
    logger level prefix (e.g. ``INFO - ``), parses the remainder as
    JSON and writes a flattened one-line form to
    ``self.transformed_path``.  Lines that cannot be parsed as JSON
    are skipped and logged at debug level.
    """
    # Log lines generated from the Python logger have a level name
    # followed by ' - '; it must be removed before the line parses as
    # JSON.  Compile the pattern once, outside the per-line loop.
    level_prefix = re.compile('^ERROR - |^DEBUG - |'
                              '^WARN - |^CRITICAL - |^INFO - ')
    with open(self.original_file_path, 'r') as logfile:
        with open(self.transformed_path, 'w') as tflogfile:
            # Iterate the file object directly instead of readlines()
            # so the whole log is never held in memory at once.
            for line in logfile:
                try:
                    m = level_prefix.match(line)
                    if m is not None:
                        line = line.replace(m.group(0), '')
                    # NOTE: json.loads() dropped its 'encoding' keyword
                    # in Python 3.9; the input is already decoded text,
                    # so no encoding argument is needed.
                    json_line = json.loads(line)
                    json_line['user'] = ''
                    if json_line.get('_user') == 'false':
                        # add a suffix to the tag to differentiate user
                        # logs from system logs.
                        json_line['user'] = '******'
                    flatline = ("%(@timestamp)s "
                                "solum.%(task)s.%(stage_id)s%(user)s "
                                "%(message)s\n")
                    tflogfile.write(flatline % json_line)
                except ValueError:
                    LOG.debug("Could not parse json line: %s", line)
def test_index(self):
    """The root URL advertises v1.0 as the current API version."""
    accept_json = {'Accept': 'application/json'}
    resp = self.app.get('/', headers=accept_json)
    self.assertEqual(resp.status_int, 200)
    versions = jsonutils.loads(resp.body.decode())
    first = versions[0]
    self.assertEqual(first['id'], 'v1.0')
    self.assertEqual(first['status'], 'CURRENT')
    expected_link = {'href': 'http://localhost/v1',
                     'target_name': 'v1'}
    self.assertEqual(first['link'], expected_link)
def test_operations_get_all(self):
    """GET /v1/operations returns an empty operations collection."""
    resp = self.app.get('/v1/operations',
                        headers={'Accept': 'application/json'})
    self.assertEqual(resp.status_int, 200)
    body = jsonutils.loads(resp.body.decode())
    expected = {'uri': 'http://localhost/v1/operations',
                'type': 'operations',
                'description': 'Collection of operations',
                'operation_links': []}
    for field, value in expected.items():
        self.assertEqual(body[field], value)
def test_platform(self):
    """GET /v1/ describes the solum platform implementation."""
    resp = self.app.get('/v1/', headers={'Accept': 'application/json'})
    self.assertEqual(resp.status_int, 200)
    platform = jsonutils.loads(resp.body.decode())
    expected = {'uri': 'http://localhost/v1',
                'name': 'solum',
                'description': 'solum native implementation',
                'implementation_version': version.version_string()}
    for field, value in expected.items():
        self.assertEqual(platform[field], value)
def _unpack_json_msg(self, msg):
    """Decode JSON message content in place when its type requires it.

    A Qpid Message carrying a dict arrives with content_type
    'amqp/map'; one carrying a JSON-encoded string arrives with
    content_type JSON_CONTENT_TYPE.  For the latter, replace
    msg.content with the decoded data and relabel the content type
    as 'amqp/map'.

    :param msg: a Qpid Message object
    :returns: None
    """
    # Nothing to do unless the payload is still JSON-encoded text.
    if msg.content_type != JSON_CONTENT_TYPE:
        return
    msg.content = jsonutils.loads(msg.content)
    msg.content_type = 'amqp/map'
def transform_jsonlog(self):
    """Flatten a JSON log file into single-line plain-text records.

    Reads ``self.original_file_path`` line by line, parses each line
    as JSON and writes a flattened one-line form to
    ``self.transformed_path``.  Lines that cannot be parsed as JSON
    are skipped and logged at debug level.
    """
    with open(self.original_file_path, 'r') as logfile:
        with open(self.transformed_path, 'w') as tflogfile:
            # Iterate the file object directly instead of readlines()
            # so the whole log is never held in memory at once.
            for line in logfile:
                try:
                    # NOTE: json.loads() dropped its 'encoding' keyword
                    # in Python 3.9; the input is already decoded text,
                    # so no encoding argument is needed.
                    json_line = json.loads(line)
                    json_line['user'] = ''
                    if json_line.get('_user') == 'false':
                        # add a suffix to the tag to differentiate user
                        # logs from system logs.
                        json_line['user'] = '******'
                    flatline = ("%(@timestamp)s "
                                "solum.%(task)s.%(build_id)s%(user)s "
                                "%(message)s\n")
                    tflogfile.write(flatline % json_line)
                except ValueError:
                    LOG.debug("Could not parse json line: %s", line)
def deserialize_msg(msg):
    """Unwrap an RPC message that may or may not carry an envelope.

    :param msg: the value received off the wire (any type)
    :returns: the inner message payload, or *msg* unchanged when it is
              not an envelope
    :raises UnsupportedRpcEnvelopeVersion: when an envelope is present
            but its version is incompatible
    """
    # NOTE(russellb): Hang on to your hats, this road is about to
    # get a little bumpy.
    #
    # Robustness Principle:
    #    "Be strict in what you send, liberal in what you accept."
    #
    # At this point we have to do a bit of guessing about what it
    # is we just received.  Here is the set of possibilities:
    #
    # 1) We received a dict.  This could be 2 things:
    #
    #   a) Inspect it to see if it looks like a standard message envelope.
    #      If so, great!
    #
    #   b) If it doesn't look like a standard message envelope, it could
    #      either be a notification, or a message from before we added a
    #      message envelope (referred to as version 1.0).
    #      Just return the message as-is.
    #
    # 2) It's any other non-dict type.  Just return it and hope for the
    #    best.  This case covers return values from rpc.call() from before
    #    message envelopes were used.  (messages to call a method were
    #    always a dict)

    if not isinstance(msg, dict):
        # See #2 above.
        return msg

    base_envelope_keys = (_VERSION_KEY, _MESSAGE_KEY)
    # Idiomatic membership check instead of all(map(lambda ...)).
    if not all(key in msg for key in base_envelope_keys):
        # See #1.b above.
        return msg

    # At this point we think we have the message envelope
    # format we were expecting. (#1.a above)
    if not version_is_compatible(_RPC_ENVELOPE_VERSION, msg[_VERSION_KEY]):
        raise UnsupportedRpcEnvelopeVersion(version=msg[_VERSION_KEY])

    raw_msg = jsonutils.loads(msg[_MESSAGE_KEY])

    return raw_msg
def deserialize_remote_exception(conf, data):
    """Reconstruct an exception that was serialized by a remote peer.

    :param conf: configuration object providing
                 allowed_rpc_exception_modules
    :param data: the serialized (JSON) failure payload
    :returns: a rebuilt exception instance, or a RemoteError stand-in
              when the original class cannot be safely recreated
    """
    payload = jsonutils.loads(str(data))

    trace = payload.get('tb', [])
    message = payload.get('message', "") + "\n" + "\n".join(trace)
    name = payload.get('class')
    module = payload.get('module')

    # NOTE(ameade): We DO NOT want to allow just any module to be imported,
    # in order to prevent arbitrary code execution.
    if module not in conf.allowed_rpc_exception_modules:
        return RemoteError(name, payload.get('message'), trace)

    try:
        mod = importutils.import_module(module)
        klass = getattr(mod, name)
        if not issubclass(klass, Exception):
            raise TypeError("Can only deserialize Exceptions")

        failure = klass(*payload.get('args', []),
                        **payload.get('kwargs', {}))
    except (AttributeError, TypeError, ImportError):
        return RemoteError(name, payload.get('message'), trace)

    ex_type = type(failure)

    def str_override(self):
        return message

    new_ex_type = type(ex_type.__name__ + _REMOTE_POSTFIX, (ex_type,),
                       {'__str__': str_override,
                        '__unicode__': str_override})
    new_ex_type.__module__ = '%s%s' % (module, _REMOTE_POSTFIX)
    try:
        # NOTE(ameade): Dynamically create a new exception type and swap
        # it in as the new type for the exception. This only works on
        # user defined Exceptions and not core python exceptions. This is
        # important because we cannot necessarily change an exception
        # message so we must override the __str__ method.
        failure.__class__ = new_ex_type
    except TypeError:
        # NOTE(ameade): If a core exception then just add the traceback
        # to the first exception argument.
        failure.args = (message,) + failure.args[1:]
    return failure
def deserialize_remote_exception(conf, data):
    """Turn serialized remote-exception data back into an exception.

    :param conf: configuration object providing
                 allowed_rpc_exception_modules
    :param data: the serialized (JSON) failure payload
    :returns: a rebuilt exception instance, or a RemoteError stand-in
              when the original class cannot be safely recreated
    """
    failure = jsonutils.loads(str(data))

    trace = failure.get('tb', [])
    message = failure.get('message', "") + "\n" + "\n".join(trace)
    name = failure.get('class')
    module = failure.get('module')

    def fallback():
        # Opaque stand-in used whenever the real class cannot be rebuilt.
        return RemoteError(name, failure.get('message'), trace)

    # NOTE(ameade): We DO NOT want to allow just any module to be
    # imported, in order to prevent arbitrary code execution.
    if module not in conf.allowed_rpc_exception_modules:
        return fallback()

    try:
        mod = importutils.import_module(module)
        klass = getattr(mod, name)
        if not issubclass(klass, Exception):
            raise TypeError("Can only deserialize Exceptions")

        failure = klass(*failure.get('args', []),
                        **failure.get('kwargs', {}))
    except (AttributeError, TypeError, ImportError):
        return fallback()

    remote_name = type(failure).__name__ + _REMOTE_POSTFIX

    def _as_message(self):
        return message

    remote_type = type(remote_name, (type(failure),),
                       {'__str__': _as_message,
                        '__unicode__': _as_message})
    remote_type.__module__ = '%s%s' % (module, _REMOTE_POSTFIX)
    try:
        # NOTE(ameade): Dynamically create a new exception type and swap
        # it in as the new type for the exception. This only works on
        # user defined Exceptions and not core python exceptions. This is
        # important because we cannot necessarily change an exception
        # message so we must override the __str__ method.
        failure.__class__ = remote_type
    except TypeError:
        # NOTE(ameade): If a core exception then just add the traceback
        # to the first exception argument.
        failure.args = (message,) + failure.args[1:]
    return failure
def test_components_get_all(self):
    """The components collection starts out empty."""
    headers = {'Accept': 'application/json'}
    resp = self.app.get('/v1/components', headers=headers)
    self.assertEqual(resp.status_int, 200)
    body = jsonutils.loads(resp.body.decode())
    self.assertEqual(body, [])
def _deserialize(data):
    """Deserialization wrapper.

    Logs the raw payload at debug level, then decodes it as JSON.
    """
    LOG.debug(_("Deserializing: %s"), data)
    decoded = jsonutils.loads(data)
    return decoded