Example #1
    def test_hadoop_logger_parser(self):
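        # Hadoop client stderr lines are expected to be passed through to the info log unchanged.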
        messages = ["Hello world", "zhu xi da fa hao", "Happy 1024 day!"]
        hadoop_stderr_msg = "Hadoop client STDERR:"
        expected = [hadoop_stderr_msg + msg for msg in messages]
        for log in messages:
            backend_parser.backend_parser(hadoop_stderr_msg + log)

        self.assertEqual(expected, self.logged_msgs['info'])
Example #2
    def test_spark_driver_parser_normal(self):
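        # Date and timestamp prefixes should be stripped, leaving only the log level and message.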
        messages = [
            "16:57:04 WARN UserGroupInformation: DEBUG hadoop ugi bigflow",
            "17/10/18 16:57:05 INFO Client: Source and destination file systems are the same.",
            "17/10/18 16:57:13 INFO Client: Application report for "
            "application_1500613944784_123204 (state: ACCEPTED)",
        ]

        expected = [
            "WARN UserGroupInformation: DEBUG hadoop ugi bigflow",
            "INFO Client: Source and destination file systems are the same.",
            "INFO Client: Application report for application_1500613944784_123204 (state: ACCEPTED)"
        ]
        for msg in messages:
            backend_parser.backend_parser(msg)

        self.assertEqual(expected, self.logged_msgs['info'])
Example #3
    def test_simple_parser(self):
        backend_parser.backend_parser("start to launch job...")
        backend_parser.backend_parser("Planner start optimizing")
        backend_parser.backend_parser("Planner finished optimizing")
        expected = ["Start new job",
                    "Backend planner start optimizing",
                    "Backend planner finished optimizing",
                    "Start running job"]
        self.assertEqual(expected, self.logged_msgs['info'])
Example #4
    def test_spark_driver_exception(self):
        tracking_log = """17/10/18 16:57:28 INFO Client:
        client token: N/A
        diagnostics: N/A
        ApplicationMaster host: 255.255.255.255
        tracking URL: http://hostname:8388/proxy/application_1500613944784_123204/
        user: bigflow"""
        binding_exception_log = """17/10/18 16:57:02 WARN java.net.BindException: Address already in use
java.net.BindException: Address already in use
        at sun.nio.ch.Net.bind0(Native Method)
        at sun.nio.ch.Net.bind(Net.java:437)
        at sun.nio.ch.Net.bind(Net.java:429)
        at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:223)
Caused by: javax.security.auth.login.LoginException: java.lang.NullPointerException
        at org.apache.hadoop.security.UserGroupInformation$BaiduHadoopLoginModule.commit(UserGroupInformation.java:247)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:497)

        at xxx
        ... 20 more"""
        addition_log = "17/10/18 16:57:02 WARN ServerConnector: Stopped Spark"

        expected_logs = []
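        # Lines containing the level keyword should come back with the timestamp prefix
        # stripped; continuation and stack-trace lines should come back verbatim.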
        for log in tracking_log.split("\n"):
            backend_parser.backend_parser(log)
            if "INFO" in log:
                expected_logs.append(log[log.find("INFO"):])
            else:
                expected_logs.append(log)

        for log in binding_exception_log.split("\n"):
            backend_parser.backend_parser(log)
            if "WARN" in log:
                expected_logs.append(log[log.find("WARN"):])
            else:
                expected_logs.append(log)
        backend_parser.backend_parser(addition_log)
        expected_logs.append(addition_log[addition_log.find("WARN"):])
        self.assertEqual("\n".join(expected_logs),
                         "\n".join(self.logged_msgs['info']))
Example #5
    def test_local_uri_parser(self):
        backend_parser.backend_parser("io_format.cpp hello bigflow split uri : "
                                      "hdfs:///path/to/dev/null")
        self.assertEqual(["Reading input: hdfs:///path/to/dev/null"],
                         self.logged_msgs['info'])
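
Every example above asserts against a self.logged_msgs dictionary, which the test fixture has to populate before backend_parser.backend_parser is called. The snippet below is a minimal sketch of such a fixture; the import path and the assumption that backend_parser logs through a module-level "logger" attribute are illustrative guesses, not the project's actual setup.

    import unittest
    from unittest import mock

    # NOTE: the import path is an assumption for illustration only; replace it
    # with the module that actually defines backend_parser.
    import backend_parser


    class BackendParserTestCase(unittest.TestCase):

        def setUp(self):
            # Collect every message the parser logs, keyed by log level.
            self.logged_msgs = {'info': [], 'warning': []}
            fake_logger = mock.Mock(
                info=self.logged_msgs['info'].append,
                warning=self.logged_msgs['warning'].append)
            # Assumption: backend_parser writes through a module-level logger
            # attribute; patch it so each test can inspect what was logged.
            patcher = mock.patch.object(backend_parser, 'logger', fake_logger)
            patcher.start()
            self.addCleanup(patcher.stop)

With a fixture along these lines, each backend_parser.backend_parser(...) call appends whatever the parser logs to self.logged_msgs['info'] (or ['warning']), which is what the assertEqual checks in the examples compare against.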