Example #1
0
  def test_get_config_all_partially_forbidden(self):
    """get_config_sets must omit config sets the caller cannot read."""
    visible = storage.ConfigSet(
        id='services/x',
        location='https://x.googlesource.com/x',
        latest_revision='deadbeef',
    )
    hidden = storage.ConfigSet(
        id='projects/y',
        location='https://y.googlesource.com/y',
        latest_revision='badcoffee',
    )
    self.mock(storage, 'get_config_sets_async', mock.Mock())
    storage.get_config_sets_async.return_value = future([visible, hidden])
    # The ACL check grants access to services/x only.
    self.mock(acl, 'can_read_config_sets', mock.Mock(return_value={
      'services/x': True,
      'projects/y': False,
    }))

    resp = self.call_api('get_config_sets', {}).json_body

    # Only the readable config set appears in the response.
    expected = {
      'config_sets': [
        {
          'config_set': 'services/x',
          'location': 'https://x.googlesource.com/x',
          'revision': {'id': 'deadbeef'},
        },
      ],
    }
    self.assertEqual(resp, expected)
Example #2
0
    def test_get_config_all(self):
        """All config sets are returned with their full revision metadata."""
        self.mock(storage, 'get_config_sets_async', mock.Mock())
        config_set_x = storage.ConfigSet(
            id='services/x',
            location='https://x.googlesource.com/x',
            latest_revision='deadbeef',
            latest_revision_url='https://x.googlesource.com/x/+/deadbeef',
            latest_revision_time=datetime.datetime(2016, 1, 1),
            latest_revision_committer_email='*****@*****.**',
        )
        config_set_y = storage.ConfigSet(
            id='projects/y',
            location='https://y.googlesource.com/y',
            latest_revision='badcoffee',
            latest_revision_url='https://y.googlesource.com/y/+/badcoffee',
            latest_revision_time=datetime.datetime(2016, 1, 2),
            latest_revision_committer_email='*****@*****.**',
        )
        storage.get_config_sets_async.return_value = future(
            [config_set_x, config_set_y])

        resp = self.call_api('get_config_sets', {}).json_body

        # An absent config_set filter means "list everything".
        storage.get_config_sets_async.assert_called_once_with(config_set=None)

        expected = {
            'config_sets': [
                {
                    'config_set': 'services/x',
                    'location': 'https://x.googlesource.com/x',
                    'revision': {
                        'id': 'deadbeef',
                        'url': 'https://x.googlesource.com/x/+/deadbeef',
                        'timestamp': '1451606400000000',
                        'committer_email': '*****@*****.**',
                    },
                },
                {
                    'config_set': 'projects/y',
                    'location': 'https://y.googlesource.com/y',
                    'revision': {
                        'id': 'badcoffee',
                        'url': 'https://y.googlesource.com/y/+/badcoffee',
                        'timestamp': '1451692800000000',
                        'committer_email': '*****@*****.**',
                    },
                },
            ],
        }
        self.assertEqual(resp, expected)
    def test_revision_revision_exists(self):
        """Importing a commit updates latest_revision on an existing ConfigSet."""
        self.mock(gitiles, 'get_archive', mock.Mock())
        with open(TEST_ARCHIVE_PATH, 'r') as archive_file:
            gitiles.get_archive.return_value = archive_file.read()

        location = gitiles.Location(hostname='localhost',
                                    project='project',
                                    treeish='master',
                                    path='/')
        config_set_entity = storage.ConfigSet(
            id='config_set',
            latest_revision=None,
            location=str(location),
        )
        revision_entity = storage.Revision(
            parent=config_set_entity.key,
            id='deadbeef',
        )
        ndb.put_multi([config_set_entity, revision_entity])

        gitiles_import._import_revision(
            'config_set', location, self.test_commit, False)

        # The stored config set now points at the imported commit.
        refreshed = config_set_entity.key.get()
        self.assertEqual(refreshed.latest_revision, self.test_commit.sha)
Example #4
0
    def put_file(self, config_set, revision, path, content):
        """Stores a config file, its revision and blob records in datastore."""
        config_set_key = storage.ConfigSet(id=config_set,
                                           latest_revision=revision).put()
        revision_key = storage.Revision(id=revision, parent=config_set_key).put()

        # File points at the blob by content hash; the blob holds the bytes.
        content_hash = storage.compute_hash(content)
        storage.File(id=path, parent=revision_key, content_hash=content_hash).put()
        storage.Blob(id=content_hash, content=content).put()
Example #5
0
  def test_get_config_with_include_files(
      self, mock_get_config_sets_async, mock_get_file_keys):
    """include_files=True adds the file listing to the response."""
    mock_get_config_sets_async.return_value = future([
      storage.ConfigSet(
          id='services/x',
          location='https://x.googlesource.com/x',
          latest_revision='deadbeef',
          latest_revision_url='https://x.googlesource.com/x/+/deadbeef',
          latest_revision_time=datetime.datetime(2016, 1, 1),
          latest_revision_committer_email='*****@*****.**',
      ),
    ])

    # Minimal stand-in for an ndb key: the handler only calls .id().
    class FakeKey:
      def __init__(self, name):
        self.name = name

      def id(self):
        return self.name

    file_names = ['README.md', 'rick.morty', 'pied.piper']
    mock_get_file_keys.return_value = [FakeKey(name) for name in file_names]

    req = {
      'config_set': 'services/x',
      'include_files': True,
    }
    resp = self.call_api('get_config_sets', req).json_body

    expected = {
      'config_sets': [
        {
          'config_set': 'services/x',
          # One entry per stored file, in storage order.
          'files': [{'path': name} for name in file_names],
          'location': 'https://x.googlesource.com/x',
          'revision': {
            'committer_email': '*****@*****.**',
            'id': 'deadbeef',
            'timestamp': '1451606400000000',
            'url': 'https://x.googlesource.com/x/+/deadbeef'
          }
        }
      ]
    }
    self.assertEqual(resp, expected)
Example #6
0
  def test_get_mapping_all_partially_forbidden(self):
    """get_mapping hides config sets the caller cannot read."""
    self.mock(storage, 'get_config_sets_async', mock.Mock())
    storage.get_config_sets_async.return_value = future([
      storage.ConfigSet(id='services/x', location='http://x'),
      storage.ConfigSet(id='services/y', location='http://y'),
    ])
    # First ACL check passes (services/x); second fails (services/y).
    self.mock(acl, 'can_read_config_set', mock.Mock(side_effect=[True, False]))

    resp = self.call_api('get_mapping', {}).json_body

    expected = {
      'mappings': [
        {'config_set': 'services/x', 'location': 'http://x'},
      ],
    }
    self.assertEqual(resp, expected)
Example #7
0
  def test_get_mapping_all(self):
    """get_mapping with no filter returns every config set mapping."""
    self.mock(storage, 'get_config_sets_async', mock.Mock())
    storage.get_config_sets_async.return_value = future([
      storage.ConfigSet(id='services/x', location='https://x'),
      storage.ConfigSet(id='services/y', location='https://y'),
    ])

    resp = self.call_api('get_mapping', {}).json_body

    expected = {
      'mappings': [
        {'config_set': 'services/x', 'location': 'https://x'},
        {'config_set': 'services/y', 'location': 'https://y'},
      ],
    }
    self.assertEqual(resp, expected)
 def test_deadline_exceeded(self):
     """A gitiles archive-fetch timeout surfaces as gitiles_import.Error."""
     self.mock_get_log()
     self.mock(gitiles, 'get_archive', mock.Mock())
     # Simulate the urlfetch deadline expiring while downloading the archive.
     gitiles.get_archive.side_effect = urlfetch_errors.DeadlineExceededError
     storage.ConfigSet(
         location='https://localhost/project',
         latest_revision='deadbeef',
         version=0,
         id='config_set',
     ).put()
     with self.assertRaises(gitiles_import.Error):
         gitiles_import._import_config_set(
             'config_set',
             gitiles.Location.parse('https://localhost/project'))
     # The failed attempt is recorded with a human-readable message.
     self.assert_attempt(False, 'Could not import: deadline exceeded')
Example #9
0
  def put_file(self, config_set, revision, path, content):
    """Seeds datastore with a config file at the given revision."""
    config_set_key = storage.ConfigSet(
        id=config_set,
        location='https://x.com',
        latest_revision=revision,
        latest_revision_url='https://x.com/+/%s' % revision,
        latest_revision_time=datetime.datetime(2016, 1, 1),
        latest_revision_committer_email='*****@*****.**',
    ).put()
    revision_key = storage.Revision(id=revision, parent=config_set_key).put()

    # File references the blob by content hash; the blob stores the bytes.
    digest = storage.compute_hash(content)
    storage.File(id=path, parent=revision_key, content_hash=digest).put()
    storage.Blob(id=digest, content=content).put()
Example #10
0
 def test_import_config_set_without_force_update(self):
     """Import is skipped when latest_revision already matches the log head."""
     self.mock_get_log()
     # Existing entity already at the revision the mocked log will return.
     storage.ConfigSet(
         id='config_set',
         latest_revision='a1841f40264376d170269ee9473ce924b7c2c4e9',
         latest_revision_url='https://localhost/project/+/deadbeef/x',
         latest_revision_committer_email=self.john.email,
         latest_revision_time=self.john.time,
         location='https://localhost/project/+/master/x',
         version=2,
     ).put()
     self.mock(gitiles_import, '_import_revision', mock.Mock())
     gitiles_import._import_config_set(
         'config_set',
         gitiles.Location.parse('https://localhost/project/+/master/x'))
     # Up-to-date config set: no revision import should be triggered.
     self.assertFalse(gitiles_import._import_revision.called)
    def test_import_project_ref_not_resolved(self):
        """The config set entity is deleted when the ref cannot be resolved."""
        self.mock(projects, 'get_project', mock.Mock())
        projects.get_project.return_value = service_config_pb2.Project(
            id='chromium',
            config_location=service_config_pb2.ConfigSetLocation(
                url='https://localhost/chromium/src/',
                storage_type=service_config_pb2.ConfigSetLocation.GITILES,
            ),
        )
        # Treeish resolution fails, e.g. the ref disappeared upstream.
        self.mock(gitiles.Location, 'parse_resolve',
                  mock.Mock(side_effect=gitiles.TreeishResolutionError()))

        # A stale config set entity exists before the import runs.
        storage.ConfigSet(
            id='projects/chromium', location='https://example.com').put()

        gitiles_import.import_project('chromium')

        # The stale entity must have been removed.
        self.assertIsNone(storage.ConfigSet.get_by_id('projects/chromium'))
    def test_import_config_set(self):
        """End-to-end import of a config set, then an up-to-date no-op rerun."""
        self.mock_get_log()
        self.mock_get_archive()

        # Existing entity with a stale latest_revision that must be replaced.
        storage.ConfigSet(
            location='https://localhost/project',
            latest_revision='deadbeef',
            version=0,
            id='config_set',
        ).put()
        gitiles_import._import_config_set(
            'config_set', gitiles.Location.parse('https://localhost/project'),
            self.test_project_id)

        # The log was queried once for the head commit of the config location.
        gitiles.get_log.assert_called_once_with(
            'localhost',
            'project',
            'HEAD',
            '/',
            project_id=self.test_project_id,
            limit=1,
            deadline=15)

        # latest_revision now matches the sha returned by the mocked log,
        # and a Revision child entity was created for it.
        saved_config_set = storage.ConfigSet.get_by_id('config_set')
        self.assertIsNotNone(saved_config_set)
        self.assertEqual(saved_config_set.latest_revision,
                         'a1841f40264376d170269ee9473ce924b7c2c4e9')
        self.assertTrue(
            storage.Revision.get_by_id(
                'a1841f40264376d170269ee9473ce924b7c2c4e9',
                parent=saved_config_set.key))
        self.assert_attempt(True, 'Imported')

        # Import second time, import_revision should not be called.
        self.mock(gitiles_import, '_import_revision', mock.Mock())
        gitiles_import._import_config_set(
            'config_set', gitiles.Location.parse('https://localhost/project'),
            self.test_project_id)
        self.assertFalse(gitiles_import._import_revision.called)
        self.assert_attempt(True, 'Up-to-date')
Example #13
0
  def test_get_mapping_one(self):
    """get_mapping with a config_set filter returns just that mapping."""
    self.mock(storage, 'get_config_sets_async', mock.Mock())
    storage.get_config_sets_async.return_value = future([
      storage.ConfigSet(id='services/x', location='https://x'),
    ])

    resp = self.call_api(
        'get_mapping', {'config_set': 'services/x'}).json_body

    # The filter must be forwarded to the storage layer.
    storage.get_config_sets_async.assert_called_once_with(
        config_set='services/x')

    expected = {
      'mappings': [
        {'config_set': 'services/x', 'location': 'https://x'},
      ],
    }
    self.assertEqual(resp, expected)
Example #14
0
    def test_import_existing_config_set_with_log_failed(self):
        """When the gitiles log is unavailable, the config set stays untouched."""
        self.mock(gitiles_import, '_import_revision', mock.Mock())
        self.mock(gitiles, 'get_log', mock.Mock(return_value=None))

        existing = storage.ConfigSet(
            id='config_set',
            latest_revision='deadbeef',
            latest_revision_url='https://localhost/project/+/deadbeef/x',
            latest_revision_committer_email=self.john.email,
            latest_revision_time=self.john.time,
            location='https://localhost/project/+/master/x',
        )
        existing.put()

        with self.assertRaises(gitiles_import.HistoryDisappeared):
            gitiles_import._import_config_set(
                'config_set',
                gitiles.Location.parse('https://localhost/project'))

        # No import attempt is recorded and latest_revision is unchanged.
        self.assertIsNone(storage.last_import_attempt_key('config_set').get())
        refreshed = existing.key.get()
        self.assertEqual(existing.latest_revision, refreshed.latest_revision)
Example #15
0
def import_revision(
    config_set, base_location, revision, create_config_set=False):
  """Imports a referenced Gitiles revision into a config set.

  Args:
    config_set: name of the config set being imported.
    base_location: gitiles Location; used to set storage.ConfigSet.location.
    revision: full 40-hex-char sha of the revision to import.
    create_config_set: if True, also put a ConfigSet entity pointing at
      |revision|, even when the Revision entity already exists.

  Validation failures and a missing archive cause an early return without
  writing Revision/File entities.
  """
  assert re.match('[0-9a-f]{40}', revision), (
      '"%s" is not a valid sha' % revision
  )
  logging.debug('Importing revision %s @ %s', config_set, revision)
  rev_key = ndb.Key(
      storage.ConfigSet, config_set,
      storage.Revision, revision)

  updated_config_set = storage.ConfigSet(
      id=config_set,
      latest_revision=revision,
      location=str(base_location))

  if rev_key.get():
    # Revision already imported; optionally refresh the ConfigSet pointer.
    if create_config_set:
      updated_config_set.put()
    return

  # Fetch archive, extract files and save them to Blobs outside ConfigSet
  # transaction.
  location = base_location._replace(treeish=revision)
  archive = location.get_archive(
      deadline=get_gitiles_config().fetch_archive_deadline)
  if not archive:
    logging.error(
        'Could not import %s: configuration does not exist', config_set)
    return

  # Lazy %-args: formatting happens only if INFO logging is enabled.
  logging.info('%s archive size: %d bytes', config_set, len(archive))

  entities_to_put = [storage.Revision(key=rev_key)]
  if create_config_set:
    entities_to_put.append(updated_config_set)

  stream = StringIO.StringIO(archive)
  blob_futures = []
  with tarfile.open(mode='r|gz', fileobj=stream) as tar:
    for item in tar:
      if not item.isreg():  # pragma: no cover
        continue
      with contextlib.closing(tar.extractfile(item)) as extracted:
        content = extracted.read()
        ctx = config.validation.Context.logging()
        validation.validate_config(config_set, item.name, content, ctx=ctx)
        if ctx.result().has_errors:
          # Abort the whole import on the first invalid file.
          logging.error('Invalid revision %s@%s', config_set, revision)
          return
        content_hash = storage.compute_hash(content)
        blob_futures.append(storage.import_blob_async(
            content=content, content_hash=content_hash))
        entities_to_put.append(
            storage.File(
                id=item.name,
                parent=rev_key,
                content_hash=content_hash)
        )

  # Wait for Blobs to be imported before proceeding.
  ndb.Future.wait_all(blob_futures)

  @ndb.transactional
  def do_import():
    # Re-check inside the transaction so a concurrent import isn't clobbered.
    if not rev_key.get():
      ndb.put_multi(entities_to_put)

  do_import()
  logging.info('Imported revision %s/%s', config_set, location.treeish)
Example #16
0
def _import_revision(config_set, base_location, commit, force_update):
  """Imports a referenced Gitiles revision into a config set.

  |base_location| will be used to set storage.ConfigSet.location.

  Updates last ImportAttempt for the config set.

  Puts ConfigSet initialized from arguments.
  """
  revision = commit.sha
  # Only a pinned, full 40-hex sha is accepted.
  assert re.match('[0-9a-f]{40}', revision), (
      '"%s" is not a valid sha' % revision
  )
  rev_key = ndb.Key(
      storage.ConfigSet, config_set,
      storage.Revision, revision)

  # Location of the config directory at exactly this revision.
  location = base_location._replace(treeish=revision)
  attempt = storage.ImportAttempt(
      key=storage.last_import_attempt_key(config_set),
      revision=_commit_to_revision_info(commit, location))

  cs_entity = storage.ConfigSet(
      id=config_set,
      latest_revision=revision,
      latest_revision_url=str(location),
      latest_revision_committer_email=commit.committer.email,
      latest_revision_time=commit.committer.time,
      location=str(base_location),
      version=storage.ConfigSet.CUR_VERSION,
  )

  # Fast path: revision already imported and no forced refresh requested.
  if not force_update and rev_key.get():
    attempt.success = True
    attempt.message = 'Up-to-date'
    ndb.put_multi([cs_entity, attempt])
    return

  rev_entities = [cs_entity, storage.Revision(key=rev_key)]

  # Fetch archive outside ConfigSet transaction.
  archive = location.get_archive(
      deadline=get_gitiles_config().fetch_archive_deadline)
  if not archive:
    # A missing config directory is treated as a successful empty import.
    logging.warning(
        'Configuration %s does not exist. Probably it was deleted', config_set)
    attempt.success = True
    attempt.message = 'Config directory not found. Imported as empty'
  else:
    # Extract files and save them to Blobs outside ConfigSet transaction.
    files, validation_result = _read_and_validate_archive(
        config_set, rev_key, archive, location)
    if validation_result.has_errors:
      logging.warning('Invalid revision %s@%s', config_set, revision)
      notifications.notify_gitiles_rejection(
          config_set, location, validation_result)

      # Record the failed attempt (with its validation messages) and bail
      # out without touching the ConfigSet/Revision entities.
      attempt.success = False
      attempt.message = 'Validation errors'
      attempt.validation_messages = [
        storage.ImportAttempt.ValidationMessage(
            severity=config.Severity.lookup_by_number(m.severity),
            text=m.text,
        )
        for m in validation_result.messages
      ]
      attempt.put()
      return
    rev_entities += files
    attempt.success = True
    attempt.message = 'Imported'

  @ndb.transactional
  def txn():
    # Re-check inside the transaction so a concurrent import is not
    # clobbered, unless the caller explicitly forced the update.
    if force_update or not rev_key.get():
      ndb.put_multi(rev_entities)
    attempt.put()

  txn()
  logging.info('Imported revision %s/%s', config_set, location.treeish)
Example #17
0
    def test_notify_gitiles_rejection(self):
        """Rejection emails: sent once, deduped, and aware of the current rev."""
        ctx = validation.Context()
        ctx.error('err')
        ctx.warning('warn')

        # Two revisions of the same config location: the rejected new one and
        # the previously-imported old one.
        base = gitiles.Location.parse('https://example.com/x/+/infra/config')
        new_rev = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
        new_loc = base._replace(treeish=new_rev)
        old_rev = 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'
        old_loc = base._replace(treeish=old_rev)

        self.mock(notifications, '_send', mock.Mock())

        john = gitiles.Contribution('John', '*****@*****.**',
                                    datetime.datetime(2015, 1, 1))
        commit = gitiles.Commit(sha=new_rev,
                                tree='badcoffee',
                                parents=[],
                                author=john,
                                committer=john,
                                message='New config',
                                tree_diff=None)
        self.mock(gitiles, 'get_log_async',
                  mock.Mock(return_value=ndb.Future()))
        gitiles.get_log_async.return_value.set_result(
            gitiles.Log(commits=[commit], next_cursor=None))

        self.mock(template, 'render', mock.Mock())

        # Group listing supplies the CC recipients; non-user identities
        # are expected to be filtered out of the email.
        self.mock(auth, 'list_group', mock.Mock())
        auth.list_group.return_value = auth.GroupListing([
            auth.Identity('user', '*****@*****.**'),
            auth.Identity('service', 'foo'),
        ], [], [])

        # Notify.

        notifications.notify_gitiles_rejection('projects/x', new_loc,
                                               ctx.result())

        self.assertTrue(notifications._send.called)
        email = notifications._send.call_args[0][0]
        self.assertEqual(
            email.sender,
            'sample-app.appspot.com <*****@*****.**>')
        self.assertEqual(email.subject, 'Config revision aaaaaaa is rejected')
        self.assertEqual(email.to, ['John <*****@*****.**>'])
        self.assertEqual(email.cc, {'*****@*****.**'})

        # Without a stored ConfigSet there is no "current" revision to link.
        template.render.assert_called_with(
            'templates/validation_notification.html', {
                'author':
                'John',
                'messages': [{
                    'severity': 'ERROR',
                    'text': 'err'
                }, {
                    'severity': 'WARNING',
                    'text': 'warn'
                }],
                'rev_link':
                new_loc,
                'rev_hash':
                'aaaaaaa',
                'rev_repo':
                'x',
                'cur_rev_hash':
                None,
                'cur_rev_link':
                None,
            })

        # Do not send second time.
        notifications._send.reset_mock()
        notifications.notify_gitiles_rejection('projects/x', new_loc,
                                               ctx.result())
        self.assertFalse(notifications._send.called)

        # Now with config set.

        # Drop the dedup marker so the notification can fire again.
        ndb.Key(notifications.Notification, str(new_loc)).delete()

        storage.ConfigSet(id='projects/x',
                          latest_revision=old_rev,
                          latest_revision_url=str(old_loc),
                          location=str(base)).put()

        template.render.reset_mock()
        notifications.notify_gitiles_rejection('projects/x', new_loc,
                                               ctx.result())
        # With a stored ConfigSet, the template also gets the current rev.
        template.render.assert_called_with(
            'templates/validation_notification.html', {
                'author':
                'John',
                'messages': [{
                    'severity': 'ERROR',
                    'text': 'err'
                }, {
                    'severity': 'WARNING',
                    'text': 'warn'
                }],
                'rev_link':
                new_loc,
                'rev_hash':
                'aaaaaaa',
                'rev_repo':
                'x',
                'cur_rev_hash':
                'bbbbbbb',
                'cur_rev_link':
                old_loc,
            })
Example #18
0
  def test_get_config_one_with_last_attempt(self):
    """last_import_attempt is included only when explicitly requested."""
    self.mock(storage, 'get_config_sets_async', mock.Mock())
    storage.get_config_sets_async.return_value = future([
      storage.ConfigSet(
          id='services/x',
          location='https://x.googlesource.com/x',
          latest_revision='deadbeef',
          latest_revision_url='https://x.googlesource.com/x/+/deadbeef',
          latest_revision_time=datetime.datetime(2016, 1, 1),
          latest_revision_committer_email='*****@*****.**',
      ),
    ])

    # A failed import attempt with validation messages is on record.
    storage.ImportAttempt(
        key=storage.last_import_attempt_key('services/x'),
        time=datetime.datetime(2016, 1, 2),
        revision=storage.RevisionInfo(
          id='badcoffee',
          url='https://x.googlesource.com/x/+/badcoffee',
          time=datetime.datetime(2016, 1, 1),
          committer_email='*****@*****.**',
        ),
        success=False,
        message='Validation errors',
        validation_messages=[
          storage.ImportAttempt.ValidationMessage(
              severity=config.Severity.ERROR,
              text='error!',
          ),
          storage.ImportAttempt.ValidationMessage(
              severity=config.Severity.WARNING,
              text='warning!',
          ),
        ],
    ).put()

    req = {
      'config_set': 'services/x',
    }
    resp = self.call_api('get_config_sets', req).json_body

    storage.get_config_sets_async.assert_called_once_with(
        config_set='services/x')

    # Default request: attempt data is absent from the response.
    expected_resp = {
      'config_sets': [
        {
          'config_set': 'services/x',
          'location': 'https://x.googlesource.com/x',
          'revision': {
            'id': 'deadbeef',
            'url': 'https://x.googlesource.com/x/+/deadbeef',
            'timestamp': '1451606400000000',
            'committer_email': '*****@*****.**',
          },
        },
      ],
    }
    self.assertEqual(resp, expected_resp)

    # Opting in adds the stored attempt, including validation messages.
    req['include_last_import_attempt'] = True
    resp = self.call_api('get_config_sets', req).json_body
    expected_resp['config_sets'][0]['last_import_attempt'] = {
      'timestamp': '1451692800000000',
      'revision': {
        'id': 'badcoffee',
        'url': 'https://x.googlesource.com/x/+/badcoffee',
        'timestamp': '1451606400000000',
        'committer_email': '*****@*****.**',
      },
      'success': False,
      'message': 'Validation errors',
      'validation_messages': [
        {
          'severity': 'ERROR',
          'text': 'error!',
        },
        {
          'severity': 'WARNING',
          'text': 'warning!',
        },
      ]
    }
    self.assertEqual(resp, expected_resp)