Example #1
def _make_corpus_backup_public(target, corpus_fuzzer_name_override,
                               corpus_backup_bucket_name):
    """Identifies old corpus backups and makes them public."""
    corpus_backup_date = utils.utcnow().date() - datetime.timedelta(
        days=data_types.CORPUS_BACKUP_PUBLIC_LOOKBACK_DAYS)

    corpus_backup_url = corpus_manager.gcs_url_for_backup_file(
        corpus_backup_bucket_name, corpus_fuzzer_name_override
        or target.engine, target.project_qualified_name(), corpus_backup_date)

    if not storage.get(corpus_backup_url):
        logs.log_warn('Failed to find corpus backup %s.' % corpus_backup_url)
        return

    if not _set_public_acl_if_needed(corpus_backup_url):
        return

    filename = (corpus_manager.PUBLIC_BACKUP_TIMESTAMP + os.extsep +
                corpus_manager.BACKUP_ARCHIVE_FORMAT)
    public_url = os.path.join(os.path.dirname(corpus_backup_url), filename)

    if not storage.copy_blob(corpus_backup_url, public_url):
        logs.log_error(
            'Failed to overwrite %s with the latest public corpus backup.' %
            public_url)
        return

    if not _set_public_acl_if_needed(public_url):
        return

    logs.log('Corpus backup %s is now marked public.' % corpus_backup_url)
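Example #1 calls a _set_public_acl_if_needed helper that is not shown in the snippet. Below is a minimal sketch of what such a helper might look like, assuming the storage module exposes get_acl/set_acl helpers for object ACLs; those names and the 'allUsers' entity are assumptions for illustration, not taken from the snippet.

def _set_public_acl_if_needed(url):
    """Marks the GCS object at |url| public-read if it is not already public."""
    # Assumption: storage.get_acl/set_acl manage per-entity ACLs on an object.
    if storage.get_acl(url, 'allUsers'):
        logs.log('%s is already marked public, skipping.' % url)
        return True

    if not storage.set_acl(url, 'allUsers'):
        logs.log_error('Failed to mark %s public.' % url)
        return False

    return True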
Example #2
def backup_corpus(backup_bucket_name, corpus, directory):
    """Archive and store corpus as a backup.

  Args:
    backup_bucket_name: Backup bucket.
    corpus: The FuzzTargetCorpus.
    directory: Path to directory to be archived and backuped.

  Returns:
    The backup GCS url, or None on failure.
  """
    if not backup_bucket_name:
        logs.log('No backup bucket provided, skipping corpus backup.')
        return None

    dated_backup_url = None
    timestamp = str(utils.utcnow().date())

    # The archive path for shutil.make_archive should be without an extension.
    backup_archive_path = os.path.join(
        os.path.dirname(os.path.normpath(directory)), timestamp)
    try:
        backup_archive_path = shutil.make_archive(backup_archive_path,
                                                  BACKUP_ARCHIVE_FORMAT,
                                                  directory)
        logs.log('Created corpus backup file.',
                 backup_archive_path=backup_archive_path,
                 directory=directory,
                 size=os.path.getsize(backup_archive_path))

        dated_backup_url = gcs_url_for_backup_file(
            backup_bucket_name, corpus.engine,
            corpus.project_qualified_target_name, timestamp)

        if not storage.copy_file_to(backup_archive_path, dated_backup_url):
            return None

        latest_backup_url = gcs_url_for_backup_file(
            backup_bucket_name, corpus.engine,
            corpus.project_qualified_target_name, LATEST_BACKUP_TIMESTAMP)

        if not storage.copy_blob(dated_backup_url, latest_backup_url):
            logs.log_error('Failed to update latest corpus backup at "%s"' %
                           latest_backup_url)
    except Exception as ex:
        logs.log_error('backup_corpus failed: %s\n' % str(ex),
                       backup_bucket_name=backup_bucket_name,
                       directory=directory,
                       backup_archive_path=backup_archive_path)

    finally:
        # Remove backup archive.
        shell.remove_file(backup_archive_path)

    return dated_backup_url
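Both examples build their GCS paths through gcs_url_for_backup_file, whose definition is not part of the snippets. The sketch below assumes a gs://<bucket>/corpus/<fuzzer>/<target>/<date>.<format> layout; that layout is an illustrative guess, not the confirmed scheme.

def gcs_url_for_backup_file(backup_bucket_name, fuzzer_name,
                            project_qualified_target_name, date):
    """Builds the GCS URL of a corpus backup archive for the given date.

    The corpus/<fuzzer>/<target>/ prefix is an assumption for illustration;
    the real helper may organize backups differently.
    """
    backup_dir = 'gs://%s/corpus/%s/%s' % (
        backup_bucket_name, fuzzer_name, project_qualified_target_name)
    backup_file = str(date) + os.extsep + BACKUP_ARCHIVE_FORMAT
    return '%s/%s' % (backup_dir, backup_file)

In Example #2 the same function is called with LATEST_BACKUP_TIMESTAMP in place of a real date, producing the stable "latest" URL that is then overwritten with storage.copy_blob after the dated backup has been uploaded.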