Example #1
 def cache_merged_console(self, localpath):
     # Remove any query args that we don't want to keep.
     VARY_ARGS = ['numrevs=']
     args = self.request.query_string.split('&')
     args = [
         arg for arg in args
         if any([arg.startswith(pre) for pre in VARY_ARGS])
     ]
     if args:
         localpath += '?' + '&'.join(args)
     # See if we already have the appropriate page cached.
     unquoted_localpath = urllib.unquote(localpath)
     page_data = app.get_and_cache_pagedata(unquoted_localpath)
     # If we got the page and it was generated recently enough, just serve that.
     if page_data.get('content') and recent_page(page_data):
         return page_data
     # If they specified a number of revs, figure out how many they want.
     num_revs = self.request.get('numrevs')
     if num_revs:
         num_revs = utils.clean_int(num_revs, -1)
         if num_revs <= 0:
             num_revs = None
     app.console_merger(unquoted_localpath,
                        'console/chromium',
                        page_data,
                        num_rows_to_merge=num_revs)
     return app.get_and_cache_pagedata(unquoted_localpath)
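
The filtering above normalizes the cache key: every query argument that does not affect the merged console (anything other than numrevs) is stripped, so ?numrevs=20&foo=bar and ?numrevs=20 hit the same cache entry. A minimal standalone sketch of that filter, reusing only the VARY_ARGS name from the example (the rest is illustrative):

    VARY_ARGS = ['numrevs=']

    def filter_query_string(query_string):
        # Keep only the args whose prefix appears in VARY_ARGS.
        args = query_string.split('&')
        return '&'.join(arg for arg in args
                        if any(arg.startswith(pre) for pre in VARY_ARGS))

    assert filter_query_string('numrevs=20&foo=bar') == 'numrevs=20'
    assert filter_query_string('foo=bar') == ''
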
Example #2
  def test_parse_master_utf8(self):
    in_console = {'content': self.read_file('in_console.html')}
    app.parse_master(
        localpath='chromium/console',
        remoteurl='http://build.chromium.org/p/chromium/console',
        page_data=in_console)
    test_revision = '314921'
    rowdata = app.get_and_cache_rowdata('chromium/console/' + test_revision)
    summary = app.get_and_cache_pagedata('chromium/console/summary')['content']

    act_row = {}
    exp_row = {}
    for item in ['rev', 'name', 'status', 'comment']:
      # We only want to test specific values in rowdata, so we build a new
      # dict that has just those values.
      act_row[item] = rowdata[item]
      # Uncomment if deeper inspection is needed of the returned console.
      # This is also useful if changing the site layout and you need to
      # 'retrain' the test expectations.
      # self.write_file('exp_%s.html' % item,
      #                 act_row[item].encode('utf-8'))
      # self.write_file('exp_summary.html',
      #                 summary.encode('utf-8'))
      exp_row[item] = self.read_file('exp_%s.html' % item).decode('utf-8')
    exp_summary = self.read_file('exp_summary.html').decode('utf-8')

    self.assertEquals(exp_row, act_row, 'Unexpected row data found')
    self.assertEquals(exp_summary, summary, 'Unexpected build summary found')
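
The expectation files are stored on disk as UTF-8 bytes, which is why the test decodes them with .decode('utf-8') before comparing them against the unicode strings that parse_master produces. A tiny standalone illustration of that round trip (Python 2 semantics; not code from the app):

    # read_file() would hand back UTF-8 bytes like these:
    raw = u'r\xe9sum\xe9'.encode('utf-8')
    # Decoding restores the unicode value the parser produced.
    assert raw.decode('utf-8') == u'r\xe9sum\xe9'
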
Example #3
  def test_console_utf8_devcomment(self):
    """Test that a console DevComment row with a UTF-8 character is retained."""
    for master in ['mac']:
      page_data = {'content': self.read_file('in_%s.html' % master)}
      app.parse_master(
          localpath='chromium.%s/console' % master,
          remoteurl='http://build.chromium.org/p/chromium.%s/console' % master,
          page_data=page_data)

    # Get the expected and real output, compare.
    app.console_merger(
        'chromium/console', '', {},
        masters_to_merge=[
            'chromium.mac',
        ],
        num_rows_to_merge=20)
    act_merged = app.get_and_cache_pagedata('chromium/console')['content']

    # Uncomment if deeper inspection is needed of the returned console.
    # import logging
    # logging.debug('foo')
    # self.write_file('exp_merged.html', act_merged.encode('utf-8'))
    # import code
    # code.interact(local=locals())

    self.assertEquals(self.read_file('exp_merged.html').decode('utf-8'),
                      act_merged, 'Unexpected console output found')
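
This test, like the previous ones, relies on get_and_cache_pagedata() returning a dict even on a cache miss, so callers can index ['content'] unconditionally. For readers without the App Engine harness, a rough in-memory stand-in for that contract (the function names mirror the examples; the bodies are pure assumption):

    _page_cache = {}

    def get_and_cache_pagedata(localpath):
        # Hypothetical stand-in: a miss yields {'content': None} instead
        # of raising, matching how the tests index ['content'] directly.
        return _page_cache.get(localpath, {'content': None})

    def put_pagedata(localpath, content):
        _page_cache[localpath] = {'content': content}

    put_pagedata('chromium/console', u'<table>...</table>')
    assert get_and_cache_pagedata('chromium/console')['content']
    assert get_and_cache_pagedata('missing/page')['content'] is None
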
Example #4
  def test_fetch_console(self):
    def fetch_url(url):
      fr = FetchTestCase.FakeResponse()
      if url == 'http://build.chromium.org/p/chromium/console':
        fr.content = self.read_file('in.html')
      return fr

    expected_content = self.read_file('exp.html')
    app.fetch_page(
        localpath='chromium/console',
        remoteurl='http://build.chromium.org/p/chromium/console',
        maxage=0,
        postfetch=app.console_handler,
        fetch_url=fetch_url)
    page = app.get_and_cache_pagedata('chromium/console')

    # Uncomment if deeper inspection is needed of the returned console.
    # This is also useful if changing the site layout and you need to
    # 'retrain' the test expectations.
    # self.write_file('exp.html', page['content'])

    self.assertEquals('interface', page['body_class'])
    self.assertEquals(expected_content, page['content'])
    self.assertEquals(
        'http://build.chromium.org/p/chromium/console/../',
        page['offsite_base'])
    self.assertEquals('BuildBot: Chromium', page['title'])
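
fetch_page() takes fetch_url as an injected dependency, so the test never touches the network; FetchTestCase.FakeResponse is defined elsewhere in the test suite. Judging from its use above, a minimal version only needs a content attribute and a success status (this is an assumption, not the real class):

    class FakeResponse(object):
        # Hypothetical stand-in for a urlfetch-style response: the code
        # under test only reads .content (and, presumably, a status code).
        def __init__(self):
            self.content = None
            self.status_code = 200
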
Example #5
    def test_parse_master(self):
        in_console = {'content': self.read_file('in_console.html')}
        app.parse_master(
            localpath='chromium/console',
            remoteurl='http://build.chromium.org/p/chromium/console',
            page_data=in_console)
        test_revision = '314671'
        rowdata = app.get_and_cache_rowdata('chromium/console/' +
                                            test_revision)
        summary = app.get_and_cache_pagedata(
            'chromium/console/summary')['content']

        act_row = {}
        exp_row = {}
        for item in ['rev', 'name', 'status', 'comment']:
            # We only want to test specific values in rowdata, so we build
            # a new dict that has just those values.
            act_row[item] = rowdata[item]
            # Uncomment if deeper inspection is needed of the returned console.
            # This is also useful if changing the site layout and you need to
            # 'retrain' the test expectations.
            # self.write_file('exp_%s.html' % item,
            #                 act_row[item].encode('utf-8'))
            # self.write_file('exp_summary.html',
            #                 summary.encode('utf-8'))
            exp_row[item] = self.read_file('exp_%s.html' %
                                           item).decode('utf-8')
        exp_summary = self.read_file('exp_summary.html').decode('utf-8')
        self.assertEquals(exp_row, act_row, 'Unexpected row data found')
        self.assertEquals(exp_summary, summary,
                          'Unexpected build summary found')
Example #6
    def test_console_utf8_devcomment(self):
        """Test that a console DevComment row with a UTF-8 character is retained."""
        for master in ['mac']:
            page_data = {'content': self.read_file('in_%s.html' % master)}
            app.parse_master(
                localpath='chromium.%s/console' % master,
                remoteurl='http://build.chromium.org/p/chromium.%s/console' %
                master,
                page_data=page_data)

        # Get the expected and real output, compare.
        app.console_merger('chromium/console',
                           '', {},
                           masters_to_merge=[
                               'chromium.mac',
                           ],
                           num_rows_to_merge=20)
        act_merged = app.get_and_cache_pagedata('chromium/console')['content']

        # Uncomment if deeper inspection is needed of the returned console.
        # import logging
        # logging.debug('foo')
        # self.write_file('exp_merged.html', act_merged.encode('utf-8'))
        # import code
        # code.interact(local=locals())

        self.assertEquals(
            self.read_file('exp_merged.html').decode('utf-8'), act_merged,
            'Unexpected console output found')
Example #7
    def test_fetch_console(self):
        def fetch_url(url):
            fr = FetchTestCase.FakeResponse()
            if url == 'http://build.chromium.org/p/chromium/console':
                fr.content = self.read_file('in.html')
            return fr

        expected_content = self.read_file('exp.html')
        app.fetch_page(
            localpath='chromium/console',
            remoteurl='http://build.chromium.org/p/chromium/console',
            maxage=0,
            postfetch=app.console_handler,
            fetch_url=fetch_url)
        page = app.get_and_cache_pagedata('chromium/console')

        # Uncomment if deeper inspection is needed of the returned console.
        # This is also useful if changing the site layout and you need to
        # 'retrain' the test expectations.
        # self.write_file('exp.html', page['content'])

        self.assertEquals('interface', page['body_class'])
        self.assertEquals(expected_content, page['content'])
        self.assertEquals('http://build.chromium.org/p/chromium/console/../',
                          page['offsite_base'])
        self.assertEquals('BuildBot: Chromium', page['title'])
Example #8
 def cache_merged_console(self, localpath):
   # Remove any query args that we don't want to keep.
   VARY_ARGS = ['numrevs=']
   args = self.request.query_string.split('&')
   args = [arg for arg in args if any([arg.startswith(pre) for pre in
                                       VARY_ARGS])]
   if args:
     localpath += '?' + '&'.join(args)
   # See if we already have the appropriate page cached.
   unquoted_localpath = urllib.unquote(localpath)
   page_data = app.get_and_cache_pagedata(unquoted_localpath)
   # If we got the page and it was generated recently enough, just serve that.
   if page_data.get('content') and recent_page(page_data):
     return page_data
   # If they specified a number of revs, figure out how many they want.
   num_revs = self.request.get('numrevs')
   if num_revs:
     num_revs = utils.clean_int(num_revs, -1)
     if num_revs <= 0:
       num_revs = None
   app.console_merger(unquoted_localpath, 'console/chromium', page_data,
                      num_rows_to_merge=num_revs)
   return app.get_and_cache_pagedata(unquoted_localpath)
Example #9
    def _do_almost_everything(self, localpath):
        # Does almost all of the work except for writing the content to
        # the response. Returns the page_data, or None if either an error
        # occurred or the processing of the request was fully handled in
        # this method (as is done for the console).
        unquoted_localpath = urllib.unquote(localpath)
        if self.request.path.endswith('/chromium/console'):
            page_data = self.cache_merged_console(unquoted_localpath)
        else:
            page_data = app.get_and_cache_pagedata(unquoted_localpath)
        if page_data.get('content') is None:
            app.logging.error('Page %s not found.' % unquoted_localpath)
            self.error(404)  # file not found
            return None

        self.response.headers['Content-Type'] = app.path_to_mime_type(
            unquoted_localpath)
        if self.request.path.endswith('/console'):
            template_values = self.InitializeTemplate()
            template_values['body_class'] = page_data.get('body_class')
            template_values['content'] = page_data.get('content')
            template_values['offsite_base'] = page_data.get('offsite_base')
            template_values['title'] = page_data.get('title')
            if self.user:
                reloadarg = utils.clean_int(self.request.get('reload'), -1)
                if reloadarg != -1:
                    reloadarg = max(reloadarg, 30)
                    template_values['reloadarg'] = reloadarg
            else:
                # Make the Google Frontend capable of caching this request for 60
                # seconds.
                # TODO: Caching is not working yet.
                self.response.headers['Cache-Control'] = 'public, max-age=60'
                self.response.headers['Pragma'] = 'Public'
            self.DisplayTemplate('base.html', template_values)
            return None
        self.response.headers['Cache-Control'] = 'public, max-age=60'
        self.response.headers['Pragma'] = 'Public'
        return page_data
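
utils.clean_int() is what lets the handler treat the reload query argument as an integer without risking a ValueError; combined with max(reloadarg, 30), it clamps the refresh interval to at least 30 seconds. The real helper may do more, but a sketch consistent with how it is called here:

    def clean_int(value, default):
        # Hypothetical helper: parse value as an int, falling back to the
        # default on None or malformed input.
        try:
            return int(value)
        except (TypeError, ValueError):
            return default

    assert clean_int('45', -1) == 45
    assert clean_int(None, -1) == -1
    assert clean_int('bogus', -1) == -1
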
Example #10
  def _do_almost_everything(self, localpath):
    # Does almost all of the work except for writing the content to
    # the response. Returns the page_data, or None if either an error
    # occurred or the processing of the request was fully handled in
    # this method (as is done for the console).
    unquoted_localpath = urllib.unquote(localpath)
    if self.request.path.endswith('/chromium/console'):
      page_data = self.cache_merged_console(unquoted_localpath)
    else:
      page_data = app.get_and_cache_pagedata(unquoted_localpath)
    if page_data.get('content') is None:
      app.logging.error('Page %s not found.' % unquoted_localpath)
      self.error(404)  # file not found
      return None

    self.response.headers['Content-Type'] = app.path_to_mime_type(
        unquoted_localpath)
    if self.request.path.endswith('/console'):
      template_values = self.InitializeTemplate()
      template_values['body_class'] = page_data.get('body_class')
      template_values['content'] = page_data.get('content')
      template_values['offsite_base'] = page_data.get('offsite_base')
      template_values['title'] = page_data.get('title')
      if self.user:
        reloadarg = utils.clean_int(self.request.get('reload'), -1)
        if reloadarg != -1:
          reloadarg = max(reloadarg, 30)
          template_values['reloadarg'] = reloadarg
      else:
        # Make the Google Frontend capable of caching this request for 60
        # seconds.
        # TODO: Caching is not working yet.
        self.response.headers['Cache-Control'] = 'public, max-age=60'
        self.response.headers['Pragma'] = 'Public'
      self.DisplayTemplate('base.html', template_values)
      return None
    self.response.headers['Cache-Control'] = 'public, max-age=60'
    self.response.headers['Pragma'] = 'Public'
    return page_data
Example #11
 def bootstrap(self):
     """Fills an empty MergerData with 100 rows of data."""
     # Populate the categories, masters, status, and failures data.
     for m in self.ordered_masters:
         for d in (self.ordered_builders, self.ordered_categories,
                   self.status, self.failures):
             d.setdefault(m, {})
         # Get the category data and construct the list of categories
         # for this master.
         category_data = app.get_and_cache_pagedata(
             '%s/console/categories' % m)
         if not category_data['content']:
             category_list = [u'default']
         else:
             category_soup = BeautifulSoup(category_data['content'])
             category_list = [
                 tag.string.strip()
                 for tag in category_soup.findAll('td', 'DevStatus')
             ]
         self.ordered_categories[m] = category_list
         # Get the builder status data.
         builder_data = app.get_and_cache_pagedata('%s/console/summary' % m)
         if not builder_data['content']:
             continue
         builder_soup = BeautifulSoup(builder_data['content'])
         builders_by_category = builder_soup.tr.findAll('td',
                                                        'DevSlave',
                                                        recursive=False)
         # Construct the list of builders for this category.
         for i, c in enumerate(self.ordered_categories[m]):
             self.ordered_builders[m].setdefault(c, {})
             builder_list = [
                 tag['title'] for tag in builders_by_category[i].findAll(
                     'a', 'DevSlaveBox')
             ]
             self.ordered_builders[m][c] = builder_list
         # Fill in the status data for all of this master's builders.
         update_status(m, builder_data['content'], self.status)
         # Copy that status data over into the failures dictionary too.
         for c in self.ordered_categories[m]:
             self.failures[m].setdefault(c, {})
             for b in self.ordered_builders[m][c]:
                 if self.status[m][c][b] not in ('success', 'running',
                                                 'notstarted'):
                     self.failures[m][c][b] = True
                 else:
                     self.failures[m][c][b] = False
     # Populate the individual row data, saving status info in the same
     # master/category/builder tree format constructed above.
     latest_rev = int(app.get_and_cache_rowdata('latest_rev')['rev_number'])
     if not latest_rev:
         logging.error(
             "MergerData.bootstrap(): Didn't get latest_rev. Aborting.")
         return
     n = latest_rev
     num_rows_saved = num_rows_skipped = 0
     while num_rows_saved < self.SIZE and num_rows_skipped < 10:
         curr_row = RowData()
         for m in self.ordered_masters:
             update_row(n, m, curr_row)
         # If we didn't get any data, that revision doesn't exist, so skip it.
         if not curr_row.revision:
             num_rows_skipped += 1
             n -= 1
             continue
         self.rows[n] = curr_row
         num_rows_skipped = 0
         num_rows_saved += 1
         n -= 1
     self.latest_rev = max(self.rows.keys())
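
The scraping in bootstrap() relies on BeautifulSoup 3's two-argument findAll(name, cssclass) form, where a string second argument filters by CSS class. A tiny standalone demonstration of the category scrape (the HTML here is made up; only the 'td'/'DevStatus' usage comes from the example):

    from BeautifulSoup import BeautifulSoup  # BeautifulSoup 3, as above

    html = (u'<table><tr>'
            u'<td class="DevStatus"> linux </td>'
            u'<td class="DevStatus"> mac </td>'
            u'</tr></table>')
    soup = BeautifulSoup(html)
    category_list = [tag.string.strip()
                     for tag in soup.findAll('td', 'DevStatus')]
    assert category_list == [u'linux', u'mac']
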
Example #12
 def bootstrap(self):
   """Fills an empty MergerData with 100 rows of data."""
   # Populate the categories, masters, status, and failures data.
   for m in self.ordered_masters:
     for d in (self.ordered_builders,
               self.ordered_categories,
               self.status,
               self.failures):
       d.setdefault(m, {})
     # Get the category data and construct the list of categories
     # for this master.
     category_data = app.get_and_cache_pagedata('%s/console/categories' % m)
     if not category_data['content']:
       category_list = [u'default']
     else:
       category_soup = BeautifulSoup(category_data['content'])
       category_list = [tag.string.strip() for tag in
                        category_soup.findAll('td', 'DevStatus')]
     self.ordered_categories[m] = category_list
     # Get the builder status data.
     builder_data = app.get_and_cache_pagedata('%s/console/summary' % m)
     if not builder_data['content']:
       continue
     builder_soup = BeautifulSoup(builder_data['content'])
     builders_by_category = builder_soup.tr.findAll('td', 'DevSlave',
                                                    recursive=False)
     # Construct the list of builders for this category.
     for i, c in enumerate(self.ordered_categories[m]):
       self.ordered_builders[m].setdefault(c, {})
       builder_list = [tag['title'] for tag in
                       builders_by_category[i].findAll('a', 'DevSlaveBox')]
       self.ordered_builders[m][c] = builder_list
     # Fill in the status data for all of this master's builders.
     update_status(m, builder_data['content'], self.status)
     # Copy that status data over into the failures dictionary too.
     for c in self.ordered_categories[m]:
       self.failures[m].setdefault(c, {})
       for b in self.ordered_builders[m][c]:
         if self.status[m][c][b] not in ('success', 'running', 'notstarted'):
           self.failures[m][c][b] = True
         else:
           self.failures[m][c][b] = False
   # Populate the individual row data, saving status info in the same
   # master/category/builder tree format constructed above.
   latest_rev = int(app.get_and_cache_rowdata('latest_rev')['rev_number'])
   if not latest_rev:
     logging.error("MergerData.bootstrap(): Didn't get latest_rev. Aborting.")
     return
   n = latest_rev
   num_rows_saved = num_rows_skipped = 0
   while num_rows_saved < self.SIZE and num_rows_skipped < 10:
     curr_row = RowData()
     for m in self.ordered_masters:
       update_row(n, m, curr_row)
      # If we didn't get any data, that revision doesn't exist, so skip it.
     if not curr_row.revision:
       num_rows_skipped += 1
       n -= 1
       continue
     self.rows[n] = curr_row
     num_rows_skipped = 0
     num_rows_saved += 1
     n -= 1
   self.latest_rev = max(self.rows.keys())