Example #1
  def get_config_sets(self, request):
    """Returns config sets."""
    if request.config_set and not self.can_read_config_set(request.config_set):
      raise endpoints.ForbiddenException()

    config_sets = storage.get_config_sets_async(
        config_set=request.config_set).get_result()

    if request.include_last_import_attempt:
      attempts = ndb.get_multi([
        storage.last_import_attempt_key(cs.key.id()) for cs in config_sets
      ])
    else:
      attempts = [None] * len(config_sets)

    res = self.GetConfigSetsResponseMessage()
    for cs, attempt in zip(config_sets, attempts):
      if self.can_read_config_set(cs.key.id()):
        timestamp = None
        if cs.latest_revision_time:
          timestamp = utils.datetime_to_timestamp(cs.latest_revision_time)
        res.config_sets.append(ConfigSet(
            config_set=cs.key.id(),
            location=cs.location,
            revision=Revision(
                id=cs.latest_revision,
                url=cs.latest_revision_url,
                timestamp=timestamp,
                committer_email=cs.latest_revision_committer_email,
            ),
            last_import_attempt=attempt_to_msg(attempt),
        ))
    return res
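A note on the pattern above: ndb.get_multi batches every last-import-attempt lookup into a single datastore round trip, returns results in key order, and yields None for missing entities, which is why the result zips cleanly with config_sets. A minimal sketch of the same pattern in isolation (fetch_last_attempts is a hypothetical helper; storage and ndb are the modules used above):

def fetch_last_attempts(config_set_ids):
  # One batched datastore read instead of one get per config set.
  keys = [storage.last_import_attempt_key(cs_id) for cs_id in config_set_ids]
  # get_multi preserves key order and yields None for absent entities.
  return dict(zip(config_set_ids, ndb.get_multi(keys)))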
Example #2
def save_attempt(success, msg):
  storage.ImportAttempt(
      key=storage.last_import_attempt_key(config_set),
      revision=_commit_to_revision_info(commit, location),
      success=success,
      message=msg,
  ).put()
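A hedged usage sketch: storage.last_import_attempt_key returns the same key for a given config_set, so each call to save_attempt overwrites the previous attempt record. The call sites below are illustrative, not taken from the importer:

save_attempt(True, 'Imported')            # hypothetical: record a successful pass
save_attempt(False, 'Validation errors')  # hypothetical: record a failed one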
Example #3
    def get_config_sets(self, request):
        """Returns config sets."""
        if request.config_set and not can_read_config_set(request.config_set):
            raise endpoints.ForbiddenException()
        if request.include_files and not request.config_set:
            raise endpoints.BadRequestException(
                'Must specify config_set to use include_files')

        config_sets = storage.get_config_sets_async(
            config_set=request.config_set).get_result()

        # The files property must always be a list of File objects (not None).
        files = []
        if request.include_files:
            # There must be a single config set because request.config_set is
            # specified.
            cs = config_sets[0]
            if cs.latest_revision:
                file_keys = storage.get_file_keys(request.config_set,
                                                  cs.latest_revision)
                files = [File(path=key.id()) for key in file_keys]

        if request.include_last_import_attempt:
            attempts = ndb.get_multi([
                storage.last_import_attempt_key(cs.key.id())
                for cs in config_sets
            ])
        else:
            attempts = [None] * len(config_sets)

        res = self.GetConfigSetsResponseMessage()
        can_read = can_read_config_sets([cs.key.id() for cs in config_sets])
        for cs, attempt in zip(config_sets, attempts):
            if not can_read[cs.key.id()]:
                continue

            if common.REF_CONFIG_SET_RGX.match(cs.key.id()):
                # Exclude ref configs from the listing for crbug.com/935667
                # TODO(crbug.com/924803): remove ref configs altogether.
                continue

            cs_msg = ConfigSet(
                config_set=cs.key.id(),
                location=cs.location,
                files=files,
                last_import_attempt=attempt_to_msg(attempt),
            )
            if cs.latest_revision:
                cs_msg.revision = Revision(
                    id=cs.latest_revision,
                    url=cs.latest_revision_url,
                    committer_email=cs.latest_revision_committer_email,
                )
                if cs.latest_revision_time:
                    cs_msg.revision.timestamp = utils.datetime_to_timestamp(
                        cs.latest_revision_time)
            res.config_sets.append(cs_msg)
        return res
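Unlike Example #1, this version checks ACLs for all config sets in a single can_read_config_sets call rather than once per set inside the loop. The caller relies only on the return shape; a hypothetical stand-in that satisfies it:

def can_read_config_sets(config_set_ids):
  # Hypothetical stand-in: the real implementation consults service ACLs.
  # Callers depend only on receiving a {config_set_id: bool} mapping.
  return {cs_id: True for cs_id in config_set_ids}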
Example #4
  def assert_attempt(self, success, msg, config_set=None, no_revision=False):
    config_set = config_set or 'config_set'
    attempt = storage.last_import_attempt_key(config_set).get()
    self.assertIsNotNone(attempt)
    if no_revision:
      self.assertIsNone(attempt.revision)
    else:
      self.assertEqual(attempt.revision.id, self.test_commit.sha)
      self.assertEqual(attempt.revision.time, self.test_commit.committer.time)
      self.assertEqual(
          attempt.revision.url,
          'https://localhost/project/+/a1841f40264376d170269ee9473ce924b7c2c4e9'
      )
      self.assertEqual(attempt.revision.committer_email, '*****@*****.**')
    self.assertEqual(attempt.success, success)
    self.assertEqual(attempt.message, msg)
    return attempt
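A hedged usage sketch for this helper, assuming a test that has already run the importer for the default 'config_set' (run_import is a hypothetical fixture method):

  def test_import_marks_success(self):
    self.run_import()  # hypothetical: perform one import pass
    self.assert_attempt(True, 'Imported')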
Example #5
    def test_import_existing_config_set_with_log_failed(self):
        self.mock(gitiles_import, '_import_revision', mock.Mock())
        self.mock(gitiles, 'get_log', mock.Mock(return_value=None))

        cs = storage.ConfigSet(
            id='config_set',
            latest_revision='deadbeef',
            latest_revision_url='https://localhost/project/+/deadbeef/x',
            latest_revision_committer_email=self.john.email,
            latest_revision_time=self.john.time,
            location='https://localhost/project/+/master/x',
        )
        cs.put()

        with self.assertRaises(gitiles_import.HistoryDisappeared):
            gitiles_import._import_config_set(
                'config_set',
                gitiles.Location.parse('https://localhost/project'))

        self.assertIsNone(storage.last_import_attempt_key('config_set').get())

        cs_fresh = cs.key.get()
        self.assertEqual(cs.latest_revision, cs_fresh.latest_revision)
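The test pins the contract: when gitiles.get_log returns None, _import_config_set raises HistoryDisappeared and leaves both the attempt record and the stored ConfigSet untouched. A hedged caller-side sketch consistent with that contract (the skip-and-log policy is an assumption):

try:
  gitiles_import._import_config_set(
      'config_set', gitiles.Location.parse('https://localhost/project'))
except gitiles_import.HistoryDisappeared:
  # Assumed policy: skip quietly instead of recording a failed attempt,
  # matching the datastore state the test asserts.
  logging.warning('history disappeared for config_set; skipping')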
Example #6
    def get_config_sets(self, request):
        """Returns config sets."""
        if request.config_set and not self.can_read_config_set(
                request.config_set):
            raise endpoints.ForbiddenException()

        config_sets = storage.get_config_sets_async(
            config_set=request.config_set).get_result()

        if request.include_last_import_attempt:
            attempts = ndb.get_multi([
                storage.last_import_attempt_key(cs.key.id())
                for cs in config_sets
            ])
        else:
            attempts = [None] * len(config_sets)

        res = self.GetConfigSetsResponseMessage()
        for cs, attempt in zip(config_sets, attempts):
            if self.can_read_config_set(cs.key.id()):
                timestamp = None
                if cs.latest_revision_time:
                    timestamp = utils.datetime_to_timestamp(
                        cs.latest_revision_time)
                res.config_sets.append(
                    ConfigSet(
                        config_set=cs.key.id(),
                        location=cs.location,
                        revision=Revision(
                            id=cs.latest_revision,
                            url=cs.latest_revision_url,
                            timestamp=timestamp,
                            committer_email=cs.latest_revision_committer_email,
                        ),
                        last_import_attempt=attempt_to_msg(attempt),
                    ))
        return res
Example #7
def _import_revision(config_set, base_location, commit, force_update):
  """Imports a referenced Gitiles revision into a config set.

  |base_location| will be used to set storage.ConfigSet.location.

  Updates last ImportAttempt for the config set.

  Puts ConfigSet initialized from arguments.
  """
  revision = commit.sha
  assert re.match('[0-9a-f]{40}', revision), (
      '"%s" is not a valid sha' % revision
  )
  rev_key = ndb.Key(
      storage.ConfigSet, config_set,
      storage.Revision, revision)

  location = base_location._replace(treeish=revision)
  attempt = storage.ImportAttempt(
      key=storage.last_import_attempt_key(config_set),
      revision=_commit_to_revision_info(commit, location))

  cs_entity = storage.ConfigSet(
      id=config_set,
      latest_revision=revision,
      latest_revision_url=str(location),
      latest_revision_committer_email=commit.committer.email,
      latest_revision_time=commit.committer.time,
      location=str(base_location),
      version=storage.ConfigSet.CUR_VERSION,
  )

  if not force_update and rev_key.get():
    attempt.success = True
    attempt.message = 'Up-to-date'
    ndb.put_multi([cs_entity, attempt])
    return

  rev_entities = [cs_entity, storage.Revision(key=rev_key)]

  # Fetch archive outside ConfigSet transaction.
  archive = location.get_archive(
      deadline=get_gitiles_config().fetch_archive_deadline)
  if not archive:
    logging.warning(
        'Configuration %s does not exist. Probably it was deleted', config_set)
    attempt.success = True
    attempt.message = 'Config directory not found. Imported as empty'
  else:
    # Extract files and save them to Blobs outside ConfigSet transaction.
    files, validation_result = _read_and_validate_archive(
        config_set, rev_key, archive, location)
    if validation_result.has_errors:
      logging.warning('Invalid revision %s@%s', config_set, revision)
      notifications.notify_gitiles_rejection(
          config_set, location, validation_result)

      attempt.success = False
      attempt.message = 'Validation errors'
      attempt.validation_messages = [
        storage.ImportAttempt.ValidationMessage(
            severity=config.Severity.lookup_by_number(m.severity),
            text=m.text,
        )
        for m in validation_result.messages
      ]
      attempt.put()
      return
    rev_entities += files
    attempt.success = True
    attempt.message = 'Imported'

  @ndb.transactional
  def txn():
    if force_update or not rev_key.get():
      ndb.put_multi(rev_entities)
    attempt.put()

  txn()
  logging.info('Imported revision %s/%s', config_set, location.treeish)
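Relative to Example #9 below, this revision threads a force_update flag through the import: entities are re-written even when the Revision entity already exists, and the attempt record is refreshed either way. A hedged invocation sketch (latest_commit is an assumed helper; the URL is a placeholder):

loc = gitiles.Location.parse('https://localhost/project/+/master/x')
commit = latest_commit(loc)  # assumed helper returning a gitiles commit
# force_update=True re-imports even an already-imported revision.
_import_revision('config_set', loc, commit, force_update=True)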
Example #8
  def test_get_config_one_with_last_attempt(self):
    self.mock(storage, 'get_config_sets_async', mock.Mock())
    storage.get_config_sets_async.return_value = future([
      storage.ConfigSet(
          id='services/x',
          location='https://x.googlesource.com/x',
          latest_revision='deadbeef',
          latest_revision_url='https://x.googlesource.com/x/+/deadbeef',
          latest_revision_time=datetime.datetime(2016, 1, 1),
          latest_revision_committer_email='*****@*****.**',
      ),
    ])

    storage.ImportAttempt(
        key=storage.last_import_attempt_key('services/x'),
        time=datetime.datetime(2016, 1, 2),
        revision=storage.RevisionInfo(
          id='badcoffee',
          url='https://x.googlesource.com/x/+/badcoffee',
          time=datetime.datetime(2016, 1, 1),
          committer_email='*****@*****.**',
        ),
        success=False,
        message='Validation errors',
        validation_messages=[
          storage.ImportAttempt.ValidationMessage(
              severity=config.Severity.ERROR,
              text='error!',
          ),
          storage.ImportAttempt.ValidationMessage(
              severity=config.Severity.WARNING,
              text='warning!',
          ),
        ],
    ).put()

    req = {
      'config_set': 'services/x',
    }
    resp = self.call_api('get_config_sets', req).json_body

    storage.get_config_sets_async.assert_called_once_with(
        config_set='services/x')

    expected_resp = {
      'config_sets': [
        {
          'config_set': 'services/x',
          'location': 'https://x.googlesource.com/x',
          'revision': {
            'id': 'deadbeef',
            'url': 'https://x.googlesource.com/x/+/deadbeef',
            'timestamp': '1451606400000000',
            'committer_email': '*****@*****.**',
          },
        },
      ],
    }
    self.assertEqual(resp, expected_resp)

    req['include_last_import_attempt'] = True
    resp = self.call_api('get_config_sets', req).json_body
    expected_resp['config_sets'][0]['last_import_attempt'] = {
      'timestamp': '1451692800000000',
      'revision': {
        'id': 'badcoffee',
        'url': 'https://x.googlesource.com/x/+/badcoffee',
        'timestamp': '1451606400000000',
        'committer_email': '*****@*****.**',
      },
      'success': False,
      'message': 'Validation errors',
      'validation_messages': [
        {
          'severity': 'ERROR',
          'text': 'error!',
        },
        {
          'severity': 'WARNING',
          'text': 'warning!',
        },
      ]
    }
    self.assertEqual(resp, expected_resp)
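The expected timestamps in this test are microseconds since the Unix epoch: datetime(2016, 1, 1) maps to 1451606400000000 and datetime(2016, 1, 2) to 1451692800000000. A standalone check of that arithmetic, independent of the service's utils module:

import calendar
import datetime

dt = datetime.datetime(2016, 1, 1)
# Seconds since the UTC epoch, scaled to microseconds as in the response.
assert calendar.timegm(dt.timetuple()) * 1000000 == 1451606400000000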
Example #9
def _import_revision(config_set, base_location, commit):
  """Imports a referenced Gitiles revision into a config set.

  |base_location| will be used to set storage.ConfigSet.location.

  Updates last ImportAttempt for the config set.

  If Revision entity does not exist, then creates ConfigSet initialized from
  arguments.
  """
  revision = commit.sha
  assert re.match('[0-9a-f]{40}', revision), (
      '"%s" is not a valid sha' % revision
  )
  logging.debug('Importing revision %s @ %s', config_set, revision)
  rev_key = ndb.Key(
      storage.ConfigSet, config_set,
      storage.Revision, revision)

  location = base_location._replace(treeish=revision)
  attempt = storage.ImportAttempt(
      key=storage.last_import_attempt_key(config_set),
      revision=_commit_to_revision_info(commit, location))
  if rev_key.get():
    attempt.success = True
    attempt.message = 'Up-to-date'
    attempt.put()
    return

  rev_entities = [
    storage.ConfigSet(
        id=config_set,
        latest_revision=revision,
        latest_revision_url=str(location),
        latest_revision_committer_email=commit.committer.email,
        latest_revision_time=commit.committer.time,
        location=str(base_location),
    ),
    storage.Revision(key=rev_key),
  ]

  # Fetch archive outside ConfigSet transaction.
  archive = location.get_archive(
      deadline=get_gitiles_config().fetch_archive_deadline)
  if not archive:
    logging.warning(
        'Configuration %s does not exist. Probably it was deleted', config_set)
    attempt.success = True
    attempt.message = 'Config directory not found. Imported as empty'
  else:
    # Extract files and save them to Blobs outside ConfigSet transaction.
    files, validation_result = _read_and_validate_archive(
        config_set, rev_key, archive)
    if validation_result.has_errors:
      logging.warning('Invalid revision %s@%s', config_set, revision)
      notifications.notify_gitiles_rejection(
          config_set, location, validation_result)

      attempt.success = False
      attempt.message = 'Validation errors'
      attempt.validation_messages = [
        storage.ImportAttempt.ValidationMessage(
            severity=config.Severity.lookup_by_number(m.severity),
            text=m.text,
        )
        for m in validation_result.messages
      ]
      attempt.put()
      return
    rev_entities += files
    attempt.success = True
    attempt.message = 'Imported'

  @ndb.transactional
  def txn():
    if not rev_key.get():
      ndb.put_multi(rev_entities)
    attempt.put()

  txn()
  logging.info('Imported revision %s/%s', config_set, location.treeish)
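The Revision key doubles as an idempotency marker in both import variants: it is consulted before the expensive archive fetch and again inside the transaction, so a concurrent import of the same sha cannot double-write. A minimal sketch of the key layout with placeholder values:

# Parent/child key shape used above; 'deadbeef' * 5 is a placeholder sha.
rev_key = ndb.Key(
    storage.ConfigSet, 'services/x',
    storage.Revision, 'deadbeef' * 5)
already_imported = rev_key.get() is not None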