Example #1
    def put_file(self, config_set, revision, path, content):
        config_set_key = storage.ConfigSet(id=config_set, latest_revision=revision, location="http://x.com").put()
        rev_key = storage.Revision(id=revision, parent=config_set_key).put()

        content_hash = storage.compute_hash(content)
        storage.File(id=path, parent=rev_key, content_hash=content_hash).put()
        storage.Blob(id=content_hash, content=content).put()
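The entities form a hierarchy: File lives under ConfigSet → Revision via ancestor keys, while Blob is stored flat, keyed by content hash. A hypothetical read-side helper mirroring that structure (get_file_content is not part of storage; a sketch only, assuming standard GAE ndb semantics):

from google.appengine.ext import ndb

def get_file_content(config_set, revision, path):
  # Reverse of put_file above: resolve the File by ancestor key, then
  # dereference the content-addressed Blob. Returns None if absent.
  f = storage.File.get_by_id(
      path,
      parent=ndb.Key(storage.ConfigSet, config_set, storage.Revision, revision))
  if not f:
    return None
  blob = storage.Blob.get_by_id(f.content_hash)
  return blob.content if blob else None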
Example #2
 def test_import_blob(self):
     content = 'some content'
     storage.import_blob(content)
     storage.import_blob(content)  # Covers the already-imported branch.
     blob = storage.Blob.get_by_id(storage.compute_hash(content))
     self.assertIsNotNone(blob)
     self.assertEqual(blob.content, content)
Example #3
 def test_import_blob(self):
     content = "some content"
     storage.import_blob(content)
     storage.import_blob(content)  # Covers the already-imported branch.
     blob = storage.Blob.get_by_id_async(storage.compute_hash(content)).get_result()
     self.assertIsNotNone(blob)
     self.assertEqual(blob.content, content)
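This variant goes through get_by_id_async, which returns an ndb.Future; .get_result() blocks until the RPC completes. The async form pays off when several blobs are fetched at once, e.g. (a sketch assuming standard ndb future semantics; the content list is made up):

hashes = [storage.compute_hash(c) for c in ('content a', 'content b')]
futures = [storage.Blob.get_by_id_async(h) for h in hashes]
ndb.Future.wait_all(futures)  # The lookups run concurrently.
blobs = [f.get_result() for f in futures]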
Example #4
 def test_import_blob(self):
   content = 'some content'
   storage.import_blob(content)
   storage.import_blob(content)  # Covers the already-imported branch.
   blob = storage.Blob.get_by_id(storage.compute_hash(content))
   self.assertIsNotNone(blob)
   self.assertEqual(blob.content, content)
Example #5
    def put_file(self, config_set, revision, path, content):
        config_set_key = storage.ConfigSet(id=config_set,
                                           latest_revision=revision).put()
        rev_key = storage.Revision(id=revision, parent=config_set_key).put()

        content_hash = storage.compute_hash(content)
        storage.File(id=path, parent=rev_key, content_hash=content_hash).put()
        storage.Blob(id=content_hash, content=content).put()
Example #6
  def put_file(self, config_set, revision, path, content):
    config_set_key = storage.ConfigSet(
        id=config_set,
        location='https://x.com',
        latest_revision=revision,
        latest_revision_url='https://x.com/+/%s' % revision,
        latest_revision_time=datetime.datetime(2016, 1, 1),
        latest_revision_committer_email='*****@*****.**',
    ).put()
    rev_key = storage.Revision(id=revision, parent=config_set_key).put()

    content_hash = storage.compute_hash(content)
    storage.File(id=path, parent=rev_key, content_hash=content_hash).put()
    storage.Blob(id=content_hash, content=content).put()
Example #7
def _read_and_validate_archive(config_set, rev_key, archive, location):
    """Reads an archive, validates all files, imports blobs and returns files.

  If all files are valid, saves contents to Blob entities and returns
  files with their hashes.

  Return:
      (files, validation_result) tuple.
  """
    logging.info('%s archive size: %d bytes' % (config_set, len(archive)))

    stream = StringIO.StringIO(archive)
    blob_futures = []
    with tarfile.open(mode='r|gz', fileobj=stream) as tar:
        files = {}
        ctx = config.validation.Context()
        for item in tar:
            if not item.isreg():  # pragma: no cover
                continue
            logging.info('Found file "%s"', item.name)
            with contextlib.closing(tar.extractfile(item)) as extracted:
                content = extracted.read()
                files[item.name] = content
                with ctx.prefix(item.name + ': '):
                    validation.validate_config(config_set,
                                               item.name,
                                               content,
                                               ctx=ctx)

    if ctx.result().has_errors:
        return [], ctx.result()

    entities = []
    for name, content in files.iteritems():
        content_hash = storage.compute_hash(content)
        blob_futures.append(
            storage.import_blob_async(content=content,
                                      content_hash=content_hash))
        entities.append(
            storage.File(id=name,
                         parent=rev_key,
                         content_hash=content_hash,
                         url=str(location.join(name))))
    # Wait for Blobs to be imported before proceeding.
    ndb.Future.wait_all(blob_futures)
    return entities, ctx.result()
Example #8
def _read_and_validate_archive(config_set, rev_key, archive):
  """Reads an archive, validates all files, imports blobs and returns files.

  If all files are valid, saves contents to Blob entities and returns
  files with their hashes.

  Returns:
      (files, validation_result) tuple.
  """
  logging.info('%s archive size: %d bytes', config_set, len(archive))

  stream = StringIO.StringIO(archive)
  blob_futures = []
  with tarfile.open(mode='r|gz', fileobj=stream) as tar:
    files = {}
    ctx = config.validation.Context()
    for item in tar:
      if not item.isreg():  # pragma: no cover
        continue
      with contextlib.closing(tar.extractfile(item)) as extracted:
        content = extracted.read()
        files[item.name] = content
        validation.validate_config(config_set, item.name, content, ctx=ctx)

  if ctx.result().has_errors:
    return [], ctx.result()

  entities = []
  for name, content in files.iteritems():
    content_hash = storage.compute_hash(content)
    blob_futures.append(storage.import_blob_async(
      content=content, content_hash=content_hash))
    entities.append(
      storage.File(
        id=name,
        parent=rev_key,
        content_hash=content_hash)
    )
  # Wait for Blobs to be imported before proceeding.
  ndb.Future.wait_all(blob_futures)
  return entities, ctx.result()
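Either variant can be unit-tested without fetching anything by building the gzipped tarball in memory with the same stdlib modules the function already uses. A hypothetical test helper (make_archive is not part of the source):

import StringIO
import tarfile

def make_archive(files):
  # Builds an in-memory .tar.gz from a {name: content} dict, producing the
  # 'r|gz' stream format _read_and_validate_archive expects.
  out = StringIO.StringIO()
  with tarfile.open(mode='w|gz', fileobj=out) as tar:
    for name, content in files.iteritems():
      info = tarfile.TarInfo(name)
      info.size = len(content)
      tar.addfile(info, StringIO.StringIO(content))
  return out.getvalue()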
Example #9
def import_revision(
    config_set, base_location, revision, create_config_set=False):
  """Imports a referenced Gitiles revision into a config set.

  |base_location| will be used to set storage.ConfigSet.location.

  If |create_config_set| is True and Revision entity does not exist,
  then creates ConfigSet with latest_revision set to |location.treeish|.
  """
  assert re.match('[0-9a-f]{40}', revision), (
      '"%s" is not a valid sha' % revision
  )
  logging.debug('Importing revision %s @ %s', config_set, revision)
  rev_key = ndb.Key(
      storage.ConfigSet, config_set,
      storage.Revision, revision)

  updated_config_set = storage.ConfigSet(
      id=config_set,
      latest_revision=revision,
      location=str(base_location))

  if rev_key.get():
    if create_config_set:
      updated_config_set.put()
    return

  # Fetch archive, extract files and save them to Blobs outside ConfigSet
  # transaction.
  location = base_location._replace(treeish=revision)
  archive = location.get_archive(
      deadline=get_gitiles_config().fetch_archive_deadline)
  if not archive:
    logging.error(
        'Could not import %s: configuration does not exist', config_set)
    return

  logging.info('%s archive size: %d bytes', config_set, len(archive))

  entities_to_put = [storage.Revision(key=rev_key)]
  if create_config_set:
    entities_to_put.append(updated_config_set)

  stream = StringIO.StringIO(archive)
  blob_futures = []
  with tarfile.open(mode='r|gz', fileobj=stream) as tar:
    for item in tar:
      if not item.isreg():  # pragma: no cover
        continue
      with contextlib.closing(tar.extractfile(item)) as extracted:
        content = extracted.read()
        ctx = config.validation.Context.logging()
        validation.validate_config(config_set, item.name, content, ctx=ctx)
        if ctx.result().has_errors:
          logging.error('Invalid revision %s@%s', config_set, revision)
          return
        content_hash = storage.compute_hash(content)
        blob_futures.append(storage.import_blob_async(
            content=content, content_hash=content_hash))
        entities_to_put.append(
            storage.File(
                id=item.name,
                parent=rev_key,
                content_hash=content_hash)
        )

  # Wait for Blobs to be imported before proceeding.
  ndb.Future.wait_all(blob_futures)

  @ndb.transactional
  def do_import():
    if not rev_key.get():
      ndb.put_multi(entities_to_put)

  do_import()
  logging.info('Imported revision %s/%s', config_set, location.treeish)
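A hypothetical invocation, assuming the gitiles Location helper the surrounding code relies on (base_location._replace(treeish=...) implies a namedtuple-style type); the URL, config set, and revision below are made up:

loc = gitiles.Location.parse(
    'https://chromium.googlesource.com/infra/infra/+/refs/heads/master')
import_revision('projects/infra', loc, 'a' * 40, create_config_set=True)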
Example #10
 def test_compute_hash(self):
   content = 'some content\n'
   # echo some content | git hash-object --stdin
   expected = 'v1:2ef267e25bd6c6a300bb473e604b092b6a48523b'
   self.assertEqual(expected, storage.compute_hash(content))
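The expected value pins down the scheme: a Git blob hash (the output of `echo some content | git hash-object --stdin`) with a 'v1:' version prefix. A compatible sketch, assuming storage.compute_hash mirrors Git's blob hashing exactly:

import hashlib

def compute_hash_sketch(content):
  # Git hashes a blob as sha1('blob <size>\0<content>'); the 'v1:'
  # prefix versions the scheme.
  raw = 'blob %d\0%s' % (len(content), content)
  return 'v1:' + hashlib.sha1(raw).hexdigest()

assert (compute_hash_sketch('some content\n') ==
        'v1:2ef267e25bd6c6a300bb473e604b092b6a48523b')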
Example #11
 def mock_latest_config(self, config_set, contents):
     self.mock(storage, 'get_latest_configs_async', mock.Mock())
     storage.get_latest_configs_async.return_value = future({
         config_set:
         ('rev', 'file://config', storage.compute_hash(contents), contents),
     })
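future here is a test helper that wraps a plain value so the mocked *_async function behaves like a real ndb call; a minimal sketch of one common definition (the actual helper may differ):

from google.appengine.ext import ndb

def future(result):
  # Returns an already-resolved ndb.Future, so callers that invoke
  # .get_result() on the mock's return value work unchanged.
  f = ndb.Future()
  f.set_result(result)
  return f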