def test_full(self):
    """All suppression blocks scattered among garbage lines are collected, in order."""
    log = """
some garbage
even more of it
-------------------
Suppressions used:
  count name
      1 suppression
      2 bug_1234
      2 dlopen leak on error
---------------
     99 not a suppression
---------------
garbage
Suppressions used:
  count name
     10 other one
     42 bug_1234
-------------
garbage
even more garbage"""
    expected = [
        'suppression',
        'bug_1234',
        'dlopen leak on error',
        'other one',
    ]
    self.assertEqual(expected, suppression_parser.parse(log.splitlines(True)))
  def test_multiple(self):
    self.assertEqual([
        'suppression',
        'bug_1234',
        'dlopen leak on error'], suppression_parser.parse("""
Suppressions used:
  count name
      1 suppression
      2 bug_1234
      2 dlopen leak on error""".splitlines(True)))
  def test_other_format(self):
    self.assertEqual(['bug_90013_a',
                      'bug_90013_d',
                      'bug_90013_e'], suppression_parser.parse("""
Suppressions used:
   count    bytes  objects name
      23     4392       61 bug_90013_a
     126    14141      578 bug_90013_d
     299    46428     1830 bug_90013_e
""".splitlines(True)))
  def test_single(self):
    self.assertEqual(['suppression'], suppression_parser.parse("""
Suppressions used:
  count name
      1 suppression""".splitlines(True)))
 def test_empty(self):
   """An empty log produces no suppression names."""
   empty_log = ''.splitlines(True)
   self.assertEqual([], suppression_parser.parse(empty_log))
# Example #6
# 0
def reparse_suppression_results(build_step_key, _build_step_name):
    """Re-runs the suppression parser over a build step's stored log.

    Fetches the step's log text (from Cloud Storage when `log_gs` is set,
    otherwise from the blobstore via `log_stdio`), parses it for suppression
    names, and transactionally replaces the step's suppression results.

    Args:
        build_step_key: datastore key of the BuildStep to reparse.
        _build_step_name: unused; kept to match the deferred-task call
            signature.

    Raises:
        deferred.PermanentTaskFailure: when the blobstore log is missing or
            the blob key is invalid, so the deferred queue does not retry.
    """
    step = BuildStep.get(build_step_key)

    log_contents = ''
    if step.log_gs:
        with cloudstorage.open(step.log_gs) as gs_file:
            log_contents = html2text(gs_file.read().decode('utf-8', 'replace'))
    else:
        try:
            blob_reader = blobstore.BlobReader(step.log_stdio)
            log_contents = html2text(blob_reader.read().decode(
                'utf-8', 'replace'))
        # `except E, e:` is Python-2-only syntax (a syntax error on Python 3);
        # `as` is valid on 2.6+ and 3.x.
        except (ValueError, blobstore.BlobNotFoundError) as e:
            # A missing/invalid blob will never recover; fail permanently
            # instead of letting the task retry forever.
            raise deferred.PermanentTaskFailure(e)
    suppression_results = suppression_parser.parse(
        log_contents.splitlines(True))

    def tx_reparse():
        # Re-fetch inside the transaction so the read-modify-write is atomic.
        step = BuildStep.get(build_step_key)
        insert_suppression_results(step, suppression_results)
        step.put()

    db.run_in_transaction_custom_retries(10, tx_reparse)


def update_parsed_data(_param, chunk):
    """Ensures that all build steps' parsed data is in sync
  with current settings.
  """
    parse_gtest = dict((s.name, s.parse_gtest) for s in StepName.all())
    parse_suppression = dict(
# Example #7
# 0

def reparse_suppression_results(build_step_key, _build_step_name):
  """Re-runs the suppression parser over a build step's stored log.

  Fetches the step's log text (from Cloud Storage when `log_gs` is set,
  otherwise from the blobstore via `log_stdio`), parses it for suppression
  names, and transactionally replaces the step's suppression results.

  Args:
    build_step_key: datastore key of the BuildStep to reparse.
    _build_step_name: unused; kept to match the deferred-task call signature.

  Raises:
    deferred.PermanentTaskFailure: when the blobstore log is missing or the
      blob key is invalid, so the deferred queue does not retry.
  """
  step = BuildStep.get(build_step_key)

  log_contents = ''
  if step.log_gs:
    with cloudstorage.open(step.log_gs) as gs_file:
      log_contents = html2text(gs_file.read().decode('utf-8', 'replace'))
  else:
    try:
      blob_reader = blobstore.BlobReader(step.log_stdio)
      log_contents = html2text(blob_reader.read().decode('utf-8', 'replace'))
    # `except E, e:` is Python-2-only syntax (a syntax error on Python 3);
    # `as` is valid on 2.6+ and 3.x.
    except (ValueError, blobstore.BlobNotFoundError) as e:
      # A missing/invalid blob will never recover; fail permanently instead
      # of letting the task retry forever.
      raise deferred.PermanentTaskFailure(e)
  suppression_results = suppression_parser.parse(log_contents.splitlines(True))

  def tx_reparse():
    # Re-fetch inside the transaction so the read-modify-write is atomic.
    step = BuildStep.get(build_step_key)
    insert_suppression_results(step, suppression_results)
    step.put()

  db.run_in_transaction_custom_retries(10, tx_reparse)


def update_parsed_data(_param, chunk):
  """Ensures that all build steps' parsed data is in sync
  with current settings.
  """
  parse_gtest = dict((s.name, s.parse_gtest) for s in StepName.all())
  parse_suppression = dict(
      (s.name, s.parse_suppression) for s in StepName.all())