Example #1
    def test_cq_status_fetch_stores_intermediate_status(self):
        urlfetch_mock = mock.Mock()
        urlfetch_mock.side_effect = [
            self._mock_response({
                'more': True,
                'cursor': 'abcd',
                'results': []
            }),
            self._mock_response({
                'more': False,
                'cursor': None,
                'results': []
            }),
        ]

        with mock.patch('google.appengine.api.urlfetch.fetch', urlfetch_mock):
            with mock.patch(
                    'status.cq_status.parse_cq_data') as parse_cq_data_mock:
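                # parse_cq_data succeeds for the first page and raises on the
                # second, so the FetchStatus saved after page one (with the
                # intermediate cursor 'abcd') is what the test inspects below.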
                parse_cq_data_mock.side_effect = [
                    None, DeadlineExceededError()
                ]
                cq_status.fetch_cq_status()

        fetch_status = FetchStatus.query().get()
        self.assertEqual(fetch_status.cursor, 'abcd')
        self.assertEqual(fetch_status.begin, '1445609862.0')
        self.assertEqual(fetch_status.end, '1446214662.0')
        self.assertEqual(fetch_status.done, False)
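
The test above relies on a _mock_response helper that is not shown. Since fetch_cq_status() only reads the .content attribute of the urlfetch result (see Example #5), a minimal sketch of such a helper, assuming it only needs to serialize the payload, could be:

    def _mock_response(self, data):
        # Assumed implementation: expose the payload as serialized JSON via
        # .content, which is all that fetch_cq_status() reads from urlfetch.
        response = mock.Mock()
        response.content = json.dumps(data)
        return response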
Example #2
  def test_cq_status_fetch_without_begin_end(self):
    urlfetch_mock = mock.Mock()
    urlfetch_mock.return_value.content = json.dumps({
        'more': False, 'cursor': None, 'results': []})

    FetchStatus(cursor='abcd', done=False).put()
    with mock.patch('google.appengine.api.urlfetch.fetch', urlfetch_mock):
      cq_status.fetch_cq_status()

    urlfetch_mock.assert_called_once_with(
      'https://chromium-cq-status.appspot.com/query?'
      'tags=action=verifier_jobs_update&cursor=abcd&count=10')
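
These tests also create FetchStatus entities directly. The model definition is not shown; judging from the fields the code reads and writes, a rough ndb sketch (field types are an assumption) would be:

from google.appengine.ext import ndb

class FetchStatus(ndb.Model):
  # Assumed types, inferred from usage: begin/end are stored as stringified
  # epoch floats rather than DateTimeProperty values.
  begin = ndb.StringProperty(default='')
  end = ndb.StringProperty(default='')
  cursor = ndb.StringProperty(default='')
  done = ndb.BooleanProperty(default=False)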
Example #3
  def test_cq_status_fetch_stores_intermediate_status(self):
    urlfetch_mock = mock.Mock()
    urlfetch_mock.side_effect = [
        self._mock_response({'more': True, 'cursor': 'abcd', 'results': []}),
        self._mock_response({'more': False, 'cursor': None, 'results': []}),
    ]

    with mock.patch('google.appengine.api.urlfetch.fetch', urlfetch_mock):
      with mock.patch('status.cq_status.parse_cq_data') as parse_cq_data_mock:
        parse_cq_data_mock.side_effect = [None, DeadlineExceededError()]
        cq_status.fetch_cq_status()

    fetch_status = FetchStatus.query().get()
    self.assertEqual(fetch_status.cursor, 'abcd')
    self.assertEqual(fetch_status.begin, '1445609862.0')
    self.assertEqual(fetch_status.end, '1446214662.0')
    self.assertEqual(fetch_status.done, False)
Example #4
  def test_cq_status_fetch_stats_new_fetch_from_last_build_run(self):
    urlfetch_mock = mock.Mock()
    urlfetch_mock.return_value.content = json.dumps({
        'more': False, 'cursor': None, 'results': []})

    FetchStatus(cursor='xxx', begin='1', end='2', done=True).put()
    BuildRun(time_started=datetime.datetime(2015, 10, 30, 12, 17, 42),
             time_finished=datetime.datetime(2015, 10, 30, 13, 17, 42),
             buildnumber=0, result=0).put()
    BuildRun(time_started=datetime.datetime(2015, 10, 30, 11, 17, 42),
             time_finished=datetime.datetime(2015, 10, 30, 12, 17, 42),
             buildnumber=0, result=0).put()
    with mock.patch('google.appengine.api.urlfetch.fetch', urlfetch_mock):
      cq_status.fetch_cq_status()

    urlfetch_mock.assert_called_once_with(
      'https://chromium-cq-status.appspot.com/query?'
      'tags=action=verifier_jobs_update&begin=1446211062.0&end=1446214662.0&'
      'count=10')
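
The begin= value asserted here is the epoch timestamp of the newest BuildRun's time_finished (2015-10-30 13:17:42 UTC), and end= is exactly one hour later, so the test evidently freezes utcnow_ts() at 2015-10-30 14:17:42 UTC. A quick stdlib check of the begin value:

import calendar
import datetime

# Epoch timestamp of the later time_finished above, treated as UTC.
begin = calendar.timegm(
    datetime.datetime(2015, 10, 30, 13, 17, 42).utctimetuple())
assert begin == 1446211062  # matches begin=1446211062.0 in the asserted URL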
Example #5
File: cq_status.py  Project: eunchong/infra
def fetch_cq_status():
  """Fetches data from chromium-cq-status app and saves new data.

  Remembers old cursor and fetches new data.
  """

  fetch_status = FetchStatus.query().get()
  cursor = ''
  begin = ''
  end = ''
  retry_count = 0

  while True:
    if fetch_status:
      if fetch_status.done:
        logging.info('historical fetching done so fetch latest...')
        end = str(time_functions.timestamp.utcnow_ts())

        last_build_run_seen = BuildRun.query().order(
            -BuildRun.time_finished).fetch(1)
        begin = str(time_functions.timestamp.utctimestamp(
            last_build_run_seen[0].time_finished))
        cursor = ''
      else:
        begin = fetch_status.begin
        end = fetch_status.end
        cursor = fetch_status.cursor
    else:
      logging.info(
          "didn't find any historical information, fetching last week")
      begin = str(time_functions.timestamp.utctimestamp(
          datetime.datetime.utcnow() - datetime.timedelta(weeks=1)))
      end = str(time_functions.timestamp.utcnow_ts())

    if begin and end:
      logging.info('fetching from ' +
                   str(datetime.datetime.fromtimestamp(float(begin))) + ' to ' +
                   str(datetime.datetime.fromtimestamp(float(end))) +
                   ' cursor: ' + cursor)
    else:
      logging.info('fetching with no begin/end and cursor: ' + cursor)

    url = "https://chromium-cq-status.appspot.com/query"
    params = []
    params.append('tags=action=verifier_jobs_update')
    if cursor:
      params.append('cursor=' + cursor)
    if begin:
      params.append('begin=' + begin)
    if end:
      params.append('end=' + end)
    # Tried count of 200 or more but would get OOM or deadline errors. Even 50
    # sometimes gives:
    # "Values may not be more than 1000000 bytes in length; received 2118015
    # bytes"
    params.append('count=10')

    url += '?' + '&'.join(params)
    logging.info('fetching url: ' + url)

    try:
      urlfetch.set_default_fetch_deadline(60)
      result = urlfetch.fetch(url).content

      timestamp_str = '"timestamp":"'
      logging_idx = result.find(timestamp_str)
      if logging_idx != -1:
        logging_idx += len(timestamp_str)
        logging_idx2 = result.find('"', logging_idx)
        logging.info(' current fetch has time of ' +
                     result[logging_idx:logging_idx2])

      try:
        json_result = json.loads(result)

        more = json_result['more']
        cursor = json_result['cursor']

        try:
          logging_output = parse_cq_data(json_result)
          if logging_output:
            logging.info('found flakes: ' + ' '.join(logging_output))
        except DeadlineExceededError:
          logging.info('got DeadlineExceededError during parse_cq_data, '
                       'catching to not show up as error')
          return
      except ValueError:
        requests_metric.increment_by(1, fields={'status': 'parse_error'})
        logging.exception('failed to parse CQ data from %s', url)
        if 'DeadlineExceededError' in result:
          logging.error('got deadline exceeded, trying again after 1s')
          time.sleep(1)
          continue
        elif retry_count < 3:
          retry_count += 1
          logging.error('will retry after sleeping ' + str(retry_count))
          time.sleep(retry_count)
          continue
        else:
          logging.error('giving up and will count current fetch as done')
          # Don't want to continue this as could be a bad cursor.
          more = False
      else:
        requests_metric.increment_by(1, fields={'status': 'success'})

      if not fetch_status:
        fetch_status = FetchStatus()
      fetch_status.done = not more
      if fetch_status.done:
        fetch_status.cursor = ''
        fetch_status.begin = ''
        fetch_status.end = ''
        retry_count = 0
        logging.info('finished fetching for current cursor')
      else:
        fetch_status.begin = begin
        fetch_status.end = end
        fetch_status.cursor = cursor
      fetch_status.put()

      if not more:
        return  # finish the cron job and wait for next iteration
    except urllib2.URLError as e:
      requests_metric.increment_by(1, fields={'status': 'fetch_error'})
      logging.warning('Failed to fetch CQ status: %s', e.reason)
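
In the App Engine app, a function like this would typically be driven by a cron-triggered handler. A minimal sketch, with the handler class and route invented for illustration (only the status.cq_status module path is taken from the tests above):

import webapp2

from status import cq_status

class FetchCQStatusHandler(webapp2.RequestHandler):
  def get(self):
    # fetch_cq_status() loops until the query is exhausted or a deadline
    # fires, checkpointing progress in FetchStatus between cron runs.
    cq_status.fetch_cq_status()

app = webapp2.WSGIApplication([
    ('/cron/fetch-cq-status', FetchCQStatusHandler),
])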
Example #6
def fetch_cq_status():
    """Fetches data from chromium-cq-status app and saves new data.

  Remembers old cursor and fetches new data.
  """

    fetch_status = FetchStatus.query().get()
    cursor = ''
    begin = ''
    end = ''
    retry_count = 0

    while True:
        if fetch_status:
            if fetch_status.done:
                logging.info('historical fetching done so fetch latest...')
                end = str(time_functions.timestamp.utcnow_ts())

                last_build_run_seen = BuildRun.query().order(
                    -BuildRun.time_finished).fetch(1)
                begin = str(
                    time_functions.timestamp.utctimestamp(
                        last_build_run_seen[0].time_finished))
                cursor = ''
            else:
                begin = fetch_status.begin
                end = fetch_status.end
                cursor = fetch_status.cursor
        else:
            logging.info(
                "didn't find any historical information, fetching last week")
            begin = str(
                time_functions.timestamp.utctimestamp(
                    datetime.datetime.utcnow() - datetime.timedelta(weeks=1)))
            end = str(time_functions.timestamp.utcnow_ts())

        if begin and end:
            logging.info('fetching from %s to %s cursor: %s',
                         str(datetime.datetime.utcfromtimestamp(float(begin))),
                         str(datetime.datetime.utcfromtimestamp(float(end))),
                         cursor)
        else:
            logging.info('fetching with no begin/end and cursor: ' + cursor)

        url = "https://chromium-cq-status.appspot.com/query"
        params = []
        params.append('tags=action=verifier_jobs_update')
        if cursor:
            params.append('cursor=' + cursor)
        if begin:
            params.append('begin=' + begin)
        if end:
            params.append('end=' + end)
        # Tried count of 200 or more but would get OOM or deadline errors. Even 50
        # sometimes gives:
        # "Values may not be more than 1000000 bytes in length; received 2118015
        # bytes"
        params.append('count=10')

        url += '?' + '&'.join(params)
        logging.info('fetching url: ' + url)

        try:
            urlfetch.set_default_fetch_deadline(60)
            result = urlfetch.fetch(url).content

            timestamp_str = '"timestamp":"'
            logging_idx = result.find(timestamp_str)
            if logging_idx != -1:
                logging_idx += len(timestamp_str)
                logging_idx2 = result.find('"', logging_idx)
                logging.info(' current fetch has time of ' +
                             result[logging_idx:logging_idx2])

            try:
                json_result = json.loads(result)

                more = json_result['more']
                cursor = json_result['cursor']

                try:
                    logging_output = parse_cq_data(json_result)
                    if logging_output:
                        logging.info('found flakes: ' +
                                     ' '.join(logging_output))
                except DeadlineExceededError:
                    logging.info(
                        'got DeadlineExceededError during parse_cq_data, '
                        'catching to not show up as error')
                    return
            except ValueError:
                requests_metric.increment_by(1,
                                             fields={'status': 'parse_error'})
                logging.exception('failed to parse CQ data from %s', url)
                if 'DeadlineExceededError' in result:
                    logging.error(
                        'got deadline exceeded, trying again after 1s')
                    time.sleep(1)
                    continue
                elif retry_count < 3:
                    retry_count += 1
                    logging.error('will retry after sleeping ' +
                                  str(retry_count))
                    time.sleep(retry_count)
                    continue
                else:
                    logging.error(
                        'giving up and will count current fetch as done')
                    # Don't want to continue this as could be a bad cursor.
                    more = False
            else:
                requests_metric.increment_by(1, fields={'status': 'success'})

            if not fetch_status:
                fetch_status = FetchStatus()
            fetch_status.done = not more
            if fetch_status.done:
                fetch_status.cursor = ''
                fetch_status.begin = ''
                fetch_status.end = ''
                retry_count = 0
                logging.info('finished fetching for current cursor')
            else:
                fetch_status.begin = begin
                fetch_status.end = end
                fetch_status.cursor = cursor
            fetch_status.put()

            if not more:
                return  # finish the cron job and wait for next iteration
        except urllib2.URLError as e:
            requests_metric.increment_by(1, fields={'status': 'fetch_error'})
            logging.warning('Failed to fetch CQ status: %s', e.reason)
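
For reference, the imports this module needs, reconstructed from the names used above (stdlib and App Engine paths are standard; the remaining names are project-local and their exact import paths are not visible in these examples):

import datetime
import json
import logging
import time
import urllib2

from google.appengine.api import urlfetch
from google.appengine.runtime import DeadlineExceededError

# FetchStatus, BuildRun, time_functions and requests_metric come from
# project-local modules; parse_cq_data lives in this module itself (the
# tests patch it as status.cq_status.parse_cq_data).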