Example 1
 def save_attempt(success, msg):
   """Records the outcome of this import attempt in datastore.

   Overwrites the per-config-set "last import attempt" entity.

   NOTE(review): relies on `config_set`, `commit` and `location` from the
   enclosing scope — presumably a closure inside an import routine; confirm
   against the surrounding function.
   """
   storage.ImportAttempt(
     key=storage.last_import_attempt_key(config_set),
     revision=_commit_to_revision_info(commit, location),
     success=success,
     message=msg,
   ).put()
Example 2
def _import_revision(config_set, base_location, commit, force_update):
  """Imports a referenced Gitiles revision into a config set.

  |base_location| will be used to set storage.ConfigSet.location.

  Updates last ImportAttempt for the config set.

  Puts ConfigSet initialized from arguments.

  Args:
    config_set: name of the config set being imported.
    base_location: gitiles location; its treeish is replaced with the commit
      sha to form the revision location.
    commit: commit object with a `sha` string and a `committer` that has
      `email` and `time` attributes.
    force_update: if True, re-import even if the revision already exists.
  """
  revision = commit.sha
  # Anchor the pattern at both ends: re.match only anchors at the start, so
  # without '$' any string with a 40-hex-char prefix would be accepted.
  assert re.match('[0-9a-f]{40}$', revision), (
      '"%s" is not a valid sha' % revision
  )
  rev_key = ndb.Key(
      storage.ConfigSet, config_set,
      storage.Revision, revision)

  location = base_location._replace(treeish=revision)
  attempt = storage.ImportAttempt(
      key=storage.last_import_attempt_key(config_set),
      revision=_commit_to_revision_info(commit, location))

  cs_entity = storage.ConfigSet(
      id=config_set,
      latest_revision=revision,
      latest_revision_url=str(location),
      latest_revision_committer_email=commit.committer.email,
      latest_revision_time=commit.committer.time,
      location=str(base_location),
      version=storage.ConfigSet.CUR_VERSION,
  )

  # Fast path: revision already imported and no forced refresh — just refresh
  # the ConfigSet metadata and record a successful attempt.
  if not force_update and rev_key.get():
    attempt.success = True
    attempt.message = 'Up-to-date'
    ndb.put_multi([cs_entity, attempt])
    return

  rev_entities = [cs_entity, storage.Revision(key=rev_key)]

  # Fetch archive outside ConfigSet transaction.
  archive = location.get_archive(
      deadline=get_gitiles_config().fetch_archive_deadline)
  if not archive:
    # A missing directory is not an error: import it as an empty config set.
    logging.warning(
        'Configuration %s does not exist. Probably it was deleted', config_set)
    attempt.success = True
    attempt.message = 'Config directory not found. Imported as empty'
  else:
    # Extract files and save them to Blobs outside ConfigSet transaction.
    files, validation_result = _read_and_validate_archive(
        config_set, rev_key, archive, location)
    if validation_result.has_errors:
      # Invalid revision: notify, record the failed attempt, and bail out
      # without touching the ConfigSet/Revision entities.
      logging.warning('Invalid revision %s@%s', config_set, revision)
      notifications.notify_gitiles_rejection(
          config_set, location, validation_result)

      attempt.success = False
      attempt.message = 'Validation errors'
      attempt.validation_messages = [
        storage.ImportAttempt.ValidationMessage(
            severity=config.Severity.lookup_by_number(m.severity),
            text=m.text,
        )
        for m in validation_result.messages
      ]
      attempt.put()
      return
    rev_entities += files
    attempt.success = True
    attempt.message = 'Imported'

  @ndb.transactional
  def txn():
    # Re-check inside the transaction: another task may have imported this
    # revision since the fast-path check above.
    if force_update or not rev_key.get():
      ndb.put_multi(rev_entities)
    attempt.put()

  txn()
  logging.info('Imported revision %s/%s', config_set, location.treeish)
Example 3
  def test_get_config_one_with_last_attempt(self):
    """The last import attempt is returned only when explicitly requested."""
    self.mock(storage, 'get_config_sets_async', mock.Mock())
    storage.get_config_sets_async.return_value = future([
      storage.ConfigSet(
          id='services/x',
          location='https://x.googlesource.com/x',
          latest_revision='deadbeef',
          latest_revision_url='https://x.googlesource.com/x/+/deadbeef',
          latest_revision_time=datetime.datetime(2016, 1, 1),
          latest_revision_committer_email='*****@*****.**',
      ),
    ])

    # Store a failed import attempt with two validation messages.
    validation_messages = [
      storage.ImportAttempt.ValidationMessage(
          severity=config.Severity.ERROR,
          text='error!',
      ),
      storage.ImportAttempt.ValidationMessage(
          severity=config.Severity.WARNING,
          text='warning!',
      ),
    ]
    storage.ImportAttempt(
        key=storage.last_import_attempt_key('services/x'),
        time=datetime.datetime(2016, 1, 2),
        revision=storage.RevisionInfo(
          id='badcoffee',
          url='https://x.googlesource.com/x/+/badcoffee',
          time=datetime.datetime(2016, 1, 1),
          committer_email='*****@*****.**',
        ),
        success=False,
        message='Validation errors',
        validation_messages=validation_messages,
    ).put()

    # Without include_last_import_attempt the attempt is omitted.
    request = {'config_set': 'services/x'}
    response = self.call_api('get_config_sets', request).json_body

    storage.get_config_sets_async.assert_called_once_with(
        config_set='services/x')

    expected = {
      'config_sets': [
        {
          'config_set': 'services/x',
          'location': 'https://x.googlesource.com/x',
          'revision': {
            'id': 'deadbeef',
            'url': 'https://x.googlesource.com/x/+/deadbeef',
            'timestamp': '1451606400000000',
            'committer_email': '*****@*****.**',
          },
        },
      ],
    }
    self.assertEqual(response, expected)

    # Opting in adds the attempt to the same payload.
    request['include_last_import_attempt'] = True
    response = self.call_api('get_config_sets', request).json_body
    expected['config_sets'][0]['last_import_attempt'] = {
      'timestamp': '1451692800000000',
      'revision': {
        'id': 'badcoffee',
        'url': 'https://x.googlesource.com/x/+/badcoffee',
        'timestamp': '1451606400000000',
        'committer_email': '*****@*****.**',
      },
      'success': False,
      'message': 'Validation errors',
      'validation_messages': [
        {
          'severity': 'ERROR',
          'text': 'error!',
        },
        {
          'severity': 'WARNING',
          'text': 'warning!',
        },
      ]
    }
    self.assertEqual(response, expected)