def test_escape_sequences(self):
    """Backslash-escaped delimiters in a counter string are unescaped.

    The group/counter names here deliberately contain escaped parens,
    brackets, braces, and backslashes.
    """
    raw = (
        r'{(\)\(\)\(\)\})(\)\(\)\(\)\})'
        r'[(\\)(\\)(1)]'
        r'[(\[\])(\[\])(2)]'
        r'[(\{\})(\{\})(3)]'
        r'[(\(\))(\(\))(4)]}')
    expected = {
        ')()()}': {
            '\\': 1,
            '[]': 2,
            '{}': 3,
            '()': 4,
        },
    }
    self.assertEqual(_parse_pre_yarn_counters(raw), expected)
def test_escape_sequences(self):
    """Escaped delimiter characters round-trip through the parser."""
    # group name and each counter name use backslash escapes
    parts = [
        r'{(\)\(\)\(\)\})(\)\(\)\(\)\})',
        r'[(\\)(\\)(1)]',
        r'[(\[\])(\[\])(2)]',
        r'[(\{\})(\{\})(3)]',
        r'[(\(\))(\(\))(4)]}',
    ]
    result = _parse_pre_yarn_counters(''.join(parts))
    self.assertEqual(
        result,
        {')()()}': {'\\': 1, '[]': 2, '{}': 3, '()': 4}})
def test_basic(self):
    """A typical pre-YARN counter string parses into nested dicts.

    Groups map display names to {counter display name: int value}.
    """
    raw = (
        '{(org.apache.hadoop.mapred.JobInProgress$Counter)'
        '(Job Counters )'
        '[(TOTAL_LAUNCHED_REDUCES)(Launched reduce tasks)(1)]'
        '[(TOTAL_LAUNCHED_MAPS)(Launched map tasks)(2)]}'
        '{(FileSystemCounters)(FileSystemCounters)'
        '[(FILE_BYTES_READ)(FILE_BYTES_READ)(10547174)]}')
    expected = {
        'Job Counters ': {
            'Launched reduce tasks': 1,
            'Launched map tasks': 2,
        },
        'FileSystemCounters': {
            'FILE_BYTES_READ': 10547174,
        },
    }
    self.assertEqual(_parse_pre_yarn_counters(raw), expected)
def test_basic(self):
    """Two counter groups with integer values parse correctly."""
    # one group keyed by Java class, one by its own name
    pieces = [
        '{(org.apache.hadoop.mapred.JobInProgress$Counter)',
        '(Job Counters )',
        '[(TOTAL_LAUNCHED_REDUCES)(Launched reduce tasks)(1)]',
        '[(TOTAL_LAUNCHED_MAPS)(Launched map tasks)(2)]}',
        '{(FileSystemCounters)(FileSystemCounters)',
        '[(FILE_BYTES_READ)(FILE_BYTES_READ)(10547174)]}',
    ]
    result = _parse_pre_yarn_counters(''.join(pieces))
    self.assertEqual(
        result,
        {
            'Job Counters ': {
                'Launched reduce tasks': 1,
                'Launched map tasks': 2,
            },
            'FileSystemCounters': {'FILE_BYTES_READ': 10547174},
        })
def test_empty(self):
    """An empty counter string yields an empty dict."""
    result = _parse_pre_yarn_counters('')
    self.assertEqual(result, {})
def test_empty(self):
    """Parsing the empty string produces no counter groups."""
    self.assertEqual({}, _parse_pre_yarn_counters(''))