Example #1
    def put_file(self, config_set, revision, path, content):
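        """Stores ConfigSet, Revision, File and Blob entities for |path|/|content|."""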
        revision_url = 'https://x.com/+/%s' % revision
        config_set_key = storage.ConfigSet(
            id=config_set,
            location='https://x.com',
            latest_revision=revision,
            latest_revision_url=revision_url,
            latest_revision_time=datetime.datetime(2016, 1, 1),
            latest_revision_committer_email='*****@*****.**',
        ).put()
        rev_key = storage.Revision(id=revision, parent=config_set_key).put()

        content_hash = storage.compute_hash(content)
        storage.File(
            id=path,
            parent=rev_key,
            content_hash=content_hash,
            url=os.path.join(revision_url, path),
        ).put()
        storage.Blob(id=content_hash, content=content).put()
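A minimal usage sketch for this helper, assuming it is called from another test method on the same class; the test method name and the config set, revision, path, and content values are made up for illustration:

    def test_reads_stored_file(self):
        # Hypothetical test method; any 40-hex-char string works as the
        # sha-like revision the other snippets expect.
        self.put_file(
            config_set='services/luci-config',
            revision='deadbeef' * 5,
            path='project.cfg',
            content='name: "example"\n')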
Example #2
def import_revision(
    config_set, base_location, revision, create_config_set=False):
  """Imports a referenced Gitiles revision into a config set.

  |base_location| will be used to set storage.ConfigSet.location.

  If |create_config_set| is True, a ConfigSet with latest_revision set to
  |revision| is created or updated, whether or not the Revision entity
  already exists.
  """
  assert re.match('[0-9a-f]{40}', revision), (
      '"%s" is not a valid sha' % revision
  )
  logging.debug('Importing revision %s @ %s', config_set, revision)
  rev_key = ndb.Key(
      storage.ConfigSet, config_set,
      storage.Revision, revision)

  updated_config_set = storage.ConfigSet(
      id=config_set,
      latest_revision=revision,
      location=str(base_location))

  if rev_key.get():
    if create_config_set:
      updated_config_set.put()
    return

  # Fetch archive, extract files and save them to Blobs outside ConfigSet
  # transaction.
  location = base_location._replace(treeish=revision)
  archive = location.get_archive(
      deadline=get_gitiles_config().fetch_archive_deadline)
  if not archive:
    logging.error(
        'Could not import %s: configuration does not exist', config_set)
    return

  logging.info('%s archive size: %d bytes', config_set, len(archive))

  entities_to_put = [storage.Revision(key=rev_key)]
  if create_config_set:
    entities_to_put.append(updated_config_set)

  stream = StringIO.StringIO(archive)
  blob_futures = []
  with tarfile.open(mode='r|gz', fileobj=stream) as tar:
    for item in tar:
      if not item.isreg():  # pragma: no cover
        continue
      with contextlib.closing(tar.extractfile(item)) as extracted:
        content = extracted.read()
        ctx = config.validation.Context.logging()
        validation.validate_config(config_set, item.name, content, ctx=ctx)
        if ctx.result().has_errors:
          logging.error('Invalid revision %s@%s', config_set, revision)
          return
        content_hash = storage.compute_hash(content)
        blob_futures.append(storage.import_blob_async(
            content=content, content_hash=content_hash))
        entities_to_put.append(
            storage.File(
                id=item.name,
                parent=rev_key,
                content_hash=content_hash)
        )

  # Wait for Blobs to be imported before proceeding.
  ndb.Future.wait_all(blob_futures)

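  # Re-check for the Revision inside the transaction so a concurrent import
  # of the same revision does not write the entities twice.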
  @ndb.transactional
  def do_import():
    if not rev_key.get():
      ndb.put_multi(entities_to_put)

  do_import()
  logging.info('Imported revision %s/%s', config_set, location.treeish)
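A minimal sketch of a call to import_revision. It assumes the gitiles component's Location.parse helper for building |base_location|; the repository URL, config set name, and revision are made up:

base_location = gitiles.Location.parse(
    'https://chromium.googlesource.com/infra/infra/+/refs/heads/main')
import_revision(
    config_set='projects/infra',
    base_location=base_location,
    revision='a' * 40,          # full sha of the commit to import
    create_config_set=True)     # also create/update the ConfigSet entity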
Example #3
def _import_revision(config_set, base_location, commit, force_update):
  """Imports a referenced Gitiles revision into a config set.

  |base_location| will be used to set storage.ConfigSet.location.

  Updates last ImportAttempt for the config set.

  Puts ConfigSet initialized from arguments.
  """
  revision = commit.sha
  assert re.match('[0-9a-f]{40}', revision), (
      '"%s" is not a valid sha' % revision
  )
  rev_key = ndb.Key(
      storage.ConfigSet, config_set,
      storage.Revision, revision)

  location = base_location._replace(treeish=revision)
  attempt = storage.ImportAttempt(
      key=storage.last_import_attempt_key(config_set),
      revision=_commit_to_revision_info(commit, location))

  cs_entity = storage.ConfigSet(
      id=config_set,
      latest_revision=revision,
      latest_revision_url=str(location),
      latest_revision_committer_email=commit.committer.email,
      latest_revision_time=commit.committer.time,
      location=str(base_location),
      version=storage.ConfigSet.CUR_VERSION,
  )

  if not force_update and rev_key.get():
    attempt.success = True
    attempt.message = 'Up-to-date'
    ndb.put_multi([cs_entity, attempt])
    return

  rev_entities = [cs_entity, storage.Revision(key=rev_key)]

  # Fetch archive outside ConfigSet transaction.
  archive = location.get_archive(
      deadline=get_gitiles_config().fetch_archive_deadline)
  if not archive:
    logging.warning(
        'Configuration %s does not exist. Probably it was deleted', config_set)
    attempt.success = True
    attempt.message = 'Config directory not found. Imported as empty'
  else:
    # Extract files and save them to Blobs outside ConfigSet transaction.
    files, validation_result = _read_and_validate_archive(
        config_set, rev_key, archive, location)
    if validation_result.has_errors:
      logging.warning('Invalid revision %s@%s', config_set, revision)
      notifications.notify_gitiles_rejection(
          config_set, location, validation_result)

      attempt.success = False
      attempt.message = 'Validation errors'
      attempt.validation_messages = [
        storage.ImportAttempt.ValidationMessage(
            severity=config.Severity.lookup_by_number(m.severity),
            text=m.text,
        )
        for m in validation_result.messages
      ]
      attempt.put()
      return
    rev_entities += files
    attempt.success = True
    attempt.message = 'Imported'

  @ndb.transactional
  def txn():
    if force_update or not rev_key.get():
      ndb.put_multi(rev_entities)
    attempt.put()

  txn()
  logging.info('Imported revision %s/%s', config_set, location.treeish)
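A minimal sketch of a call to _import_revision; it assumes a commit object exposing .sha and .committer (with email and time), since the function reads those fields. The Gitiles URL and the helper used to obtain the commit are hypothetical:

base_location = gitiles.Location.parse(
    'https://chromium.googlesource.com/infra/infra/+/refs/heads/main')
commit = fetch_latest_commit(base_location)  # hypothetical helper
_import_revision(
    config_set='projects/infra',
    base_location=base_location,
    commit=commit,
    force_update=False)  # skip the write when the Revision is already stored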