Пример #1
0
 def get(self):
     """Refreshes merged row data up through the newest known revision.

     Stores any brand-new rows, re-fetches status for every row in the
     retained window, prunes rows beyond data.SIZE, and writes a short
     completion message to the response.
     """
     newest = int(app.get_and_cache_rowdata('latest_rev')['rev_number'])
     # Store any rows that appeared since our last pass.
     if newest not in data.rows:
         for rev in xrange(data.latest_rev + 1, newest + 1):
             fresh = RowData()
             for master in data.ordered_masters:
                 update_row(rev, master, fresh)
             # A row with no revision means that revision doesn't exist.
             if fresh.revision:
                 data.rows[rev] = fresh
     # Record the newest revision we now hold.
     data.latest_rev = max(data.rows.keys())
     # Refresh the status of every row inside the window.
     for offset in xrange(data.SIZE):
         rev = data.latest_rev - offset
         if rev not in data.rows:
             continue
         row = data.rows[rev]
         for master in data.ordered_masters:
             fetched = app.get_and_cache_rowdata('%s/console/%s' % (master, rev))
             if fetched:
                 update_status(master, fetched['status'], row.status)
     # Finally drop any rows that fall outside the window we keep.
     if len(data.rows) > data.SIZE:
         for rev in sorted(data.rows.keys(), reverse=True)[data.SIZE:]:
             del data.rows[rev]
     self.response.out.write('Update completed (rows %s - %s).' %
                             (min(data.rows.keys()), max(data.rows.keys())))
Пример #2
0
 def get(self):
   """Pulls in new rows, refreshes their statuses, and prunes old rows.

   Writes 'Update completed (rows X - Y).' to the response when done.
   """
   newest_rev = int(app.get_and_cache_rowdata('latest_rev')['rev_number'])
   # Save any brand-new rows we haven't seen before.
   if newest_rev not in data.rows:
     for rev in xrange(data.latest_rev + 1, newest_rev + 1):
       fresh = RowData()
       for master in data.ordered_masters:
         update_row(rev, master, fresh)
       # No revision recorded means the revision doesn't exist; skip it.
       if fresh.revision:
         data.rows[rev] = fresh
   # Track the newest revision we now have.
   data.latest_rev = max(data.rows.keys())
   # Re-fetch status for every row inside the retained window.
   for offset in xrange(data.SIZE):
     rev = data.latest_rev - offset
     if rev not in data.rows:
       continue
     row = data.rows[rev]
     for master in data.ordered_masters:
       fetched = app.get_and_cache_rowdata('%s/console/%s' % (master, rev))
       if fetched:
         update_status(master, fetched['status'], row.status)
   # Discard rows beyond the window size.
   if len(data.rows) > data.SIZE:
     stale = sorted(data.rows.keys(), reverse=True)[data.SIZE:]
     for rev in stale:
       del data.rows[rev]
   self.response.out.write('Update completed (rows %s - %s).' %
                           (min(data.rows.keys()), max(data.rows.keys())))
Пример #3
0
  def test_parse_master_utf8(self):
    """Checks that a UTF-8 console page parses to the expected row/summary."""
    in_console = {'content': self.read_file('in_console.html')}
    app.parse_master(
        localpath='chromium/console',
        remoteurl='http://build.chromium.org/p/chromium/console',
        page_data=in_console)
    test_revision = '314921'
    rowdata = app.get_and_cache_rowdata('chromium/console/' + test_revision)
    summary = app.get_and_cache_pagedata('chromium/console/summary')['content']

    act_row = {}
    exp_row = {}
    for field in ('rev', 'name', 'status', 'comment'):
      # Only these specific rowdata values are under test, so copy them
      # into fresh dicts for comparison.
      act_row[field] = rowdata[field]
      # Uncomment to dump the parsed console for deeper inspection, e.g.
      # when the site layout changes and expectations must be retrained:
      # self.write_file('exp_%s.html' % field,
      #                 act_row[field].encode('utf-8'))
      # self.write_file('exp_summary.html',
      #                 summary.encode('utf-8'))
      exp_row[field] = self.read_file('exp_%s.html' % field).decode('utf-8')
    exp_summary = self.read_file('exp_summary.html').decode('utf-8')

    self.assertEquals(exp_row, act_row, 'Unexpected row data found')
    self.assertEquals(exp_summary, summary, 'Unexpected build summary found')
Пример #4
0
    def test_parse_master(self):
        """Verifies console parsing produces the expected row and summary."""
        in_console = {'content': self.read_file('in_console.html')}
        app.parse_master(
            localpath='chromium/console',
            remoteurl='http://build.chromium.org/p/chromium/console',
            page_data=in_console)
        test_revision = '314671'
        rowdata = app.get_and_cache_rowdata('chromium/console/' +
                                            test_revision)
        summary = app.get_and_cache_pagedata(
            'chromium/console/summary')['content']

        act_row = {}
        exp_row = {}
        for field in ('rev', 'name', 'status', 'comment'):
            # Only these specific rowdata values are under test, so copy
            # them into fresh dicts for comparison.
            act_row[field] = rowdata[field]
            # Uncomment to dump the parsed console for deeper inspection,
            # e.g. when the site layout changes and the expectations must
            # be retrained:
            # self.write_file('exp_%s.html' % field,
            #                 act_row[field].encode('utf-8'))
            # self.write_file('exp_summary.html',
            #                 summary.encode('utf-8'))
            exp_row[field] = self.read_file(
                'exp_%s.html' % field).decode('utf-8')
        exp_summary = self.read_file('exp_summary.html').decode('utf-8')
        self.assertEquals(exp_row, act_row, 'Unexpected row data found')
        self.assertEquals(exp_summary, summary,
                          'Unexpected build summary found')
Пример #5
0
def update_row(revision, master, row):
    """Merges one master's cached console data for a revision into *row*.

    Does nothing when the datastore/cache has no entry for this
    master/revision pair.
    """
    cached = app.get_and_cache_rowdata('%s/console/%s' % (master, revision))
    if cached:
        # The shared (non-status) fields come from the main master only.
        if master == 'chromium.main':
            row.revision = int(cached['rev_number'])
            row.revlink = cached['rev']
            row.committer = cached['name']
            row.comment = cached['comment']
            row.details = cached['details']
        row.status.setdefault(master, {})
        update_status(master, cached['status'], row.status)
Пример #6
0
def update_row(revision, master, row):
  """Loads one master's cached row data for *revision* into *row*.

  Returns early (leaving *row* untouched) when no cached data exists.
  """
  cached = app.get_and_cache_rowdata('%s/console/%s' % (master, revision))
  if not cached:
    return
  if master == 'chromium.main':
    # The main master is the source of truth for the common fields.
    row.revision = int(cached['rev_number'])
    for attr, key in (('revlink', 'rev'), ('committer', 'name'),
                      ('comment', 'comment'), ('details', 'details')):
      setattr(row, attr, cached[key])
  row.status.setdefault(master, {})
  update_status(master, cached['status'], row.status)
Пример #7
0
 def bootstrap(self):
     """Fills an empty MergerData with up to self.SIZE rows of data."""
     # Populate the categories, masters, status, and failures data.
     for m in self.ordered_masters:
         for d in (self.ordered_builders, self.ordered_categories,
                   self.status, self.failures):
             d.setdefault(m, {})
         # Get the category data and construct the list of categories
         # for this master.
         category_data = app.get_and_cache_pagedata(
             '%s/console/categories' % m)
         if not category_data['content']:
             # No cached categories page: fall back to a single default
             # category.
             category_list = [u'default']
         else:
             category_soup = BeautifulSoup(category_data['content'])
             # Category names are taken from the 'DevStatus' table cells.
             category_list = [
                 tag.string.strip()
                 for tag in category_soup.findAll('td', 'DevStatus')
             ]
         self.ordered_categories[m] = category_list
         # Get the builder status data.
         builder_data = app.get_and_cache_pagedata('%s/console/summary' % m)
         if not builder_data['content']:
             # Without a summary page we can learn neither builders nor
             # status for this master; skip it entirely.
             continue
         builder_soup = BeautifulSoup(builder_data['content'])
         # NOTE(review): assumes one top-level 'DevSlave' cell per
         # category, in the same order as category_list — confirm against
         # the console HTML layout.
         builders_by_category = builder_soup.tr.findAll('td',
                                                        'DevSlave',
                                                        recursive=False)
         # Construct the list of builders for this category.
         for i, c in enumerate(self.ordered_categories[m]):
             self.ordered_builders[m].setdefault(c, {})
             builder_list = [
                 tag['title'] for tag in builders_by_category[i].findAll(
                     'a', 'DevSlaveBox')
             ]
             self.ordered_builders[m][c] = builder_list
         # Fill in the status data for all of this master's builders.
         update_status(m, builder_data['content'], self.status)
         # Copy that status data over into the failures dictionary too.
         for c in self.ordered_categories[m]:
             self.failures[m].setdefault(c, {})
             for b in self.ordered_builders[m][c]:
                 if self.status[m][c][b] not in ('success', 'running',
                                                 'notstarted'):
                     self.failures[m][c][b] = True
                 else:
                     self.failures[m][c][b] = False
     # Populate the individual row data, saving status info in the same
     # master/category/builder tree format constructed above.
     latest_rev = int(app.get_and_cache_rowdata('latest_rev')['rev_number'])
     if not latest_rev:
         logging.error(
             "MergerData.bootstrap(): Didn't get latest_rev. Aborting.")
         return
     n = latest_rev
     num_rows_saved = num_rows_skipped = 0
     # Walk backwards from latest_rev until self.SIZE rows are saved,
     # giving up after 10 consecutive missing revisions.
     while num_rows_saved < self.SIZE and num_rows_skipped < 10:
         curr_row = RowData()
         for m in self.ordered_masters:
             update_row(n, m, curr_row)
         # If we didn't get any data, that revision doesn't exist, so skip on.
         if not curr_row.revision:
             num_rows_skipped += 1
             n -= 1
             continue
         self.rows[n] = curr_row
         # Reset the skip counter: only *consecutive* misses end the walk.
         num_rows_skipped = 0
         num_rows_saved += 1
         n -= 1
     self.latest_rev = max(self.rows.keys())
Пример #8
0
 def bootstrap(self):
    """Fills an empty MergerData with up to self.SIZE rows of data."""
    # Populate the categories, masters, status, and failures data.
    for m in self.ordered_masters:
      for d in (self.ordered_builders,
                self.ordered_categories,
                self.status,
                self.failures):
        d.setdefault(m, {})
      # Get the category data and construct the list of categories
      # for this master.
      category_data = app.get_and_cache_pagedata('%s/console/categories' % m)
      if not category_data['content']:
        # No cached categories page: fall back to a single default category.
        category_list = [u'default']
      else:
        category_soup = BeautifulSoup(category_data['content'])
        # Category names are taken from the 'DevStatus' table cells.
        category_list = [tag.string.strip() for tag in
                         category_soup.findAll('td', 'DevStatus')]
      self.ordered_categories[m] = category_list
      # Get the builder status data.
      builder_data = app.get_and_cache_pagedata('%s/console/summary' % m)
      if not builder_data['content']:
        # Without a summary page we can learn neither builders nor status
        # for this master; skip it entirely.
        continue
      builder_soup = BeautifulSoup(builder_data['content'])
      # NOTE(review): assumes one top-level 'DevSlave' cell per category,
      # in the same order as category_list — confirm against the console
      # HTML layout.
      builders_by_category = builder_soup.tr.findAll('td', 'DevSlave',
                                                     recursive=False)
      # Construct the list of builders for this category.
      for i, c in enumerate(self.ordered_categories[m]):
        self.ordered_builders[m].setdefault(c, {})
        builder_list = [tag['title'] for tag in
                        builders_by_category[i].findAll('a', 'DevSlaveBox')]
        self.ordered_builders[m][c] = builder_list
      # Fill in the status data for all of this master's builders.
      update_status(m, builder_data['content'], self.status)
      # Copy that status data over into the failures dictionary too.
      for c in self.ordered_categories[m]:
        self.failures[m].setdefault(c, {})
        for b in self.ordered_builders[m][c]:
          if self.status[m][c][b] not in ('success', 'running', 'notstarted'):
            self.failures[m][c][b] = True
          else:
            self.failures[m][c][b] = False
    # Populate the individual row data, saving status info in the same
    # master/category/builder tree format constructed above.
    latest_rev = int(app.get_and_cache_rowdata('latest_rev')['rev_number'])
    if not latest_rev:
      logging.error("MergerData.bootstrap(): Didn't get latest_rev. Aborting.")
      return
    n = latest_rev
    num_rows_saved = num_rows_skipped = 0
    # Walk backwards from latest_rev until self.SIZE rows are saved,
    # giving up after 10 consecutive missing revisions.
    while num_rows_saved < self.SIZE and num_rows_skipped < 10:
      curr_row = RowData()
      for m in self.ordered_masters:
        update_row(n, m, curr_row)
      # If we didn't get any data, that revision doesn't exist, so skip on.
      if not curr_row.revision:
        num_rows_skipped += 1
        n -= 1
        continue
      self.rows[n] = curr_row
      # Reset the skip counter: only *consecutive* misses end the walk.
      num_rows_skipped = 0
      num_rows_saved += 1
      n -= 1
    self.latest_rev = max(self.rows.keys())