import datetime
import json
import os

# build_project / build_lib helpers and the module-level constants
# (CONFIGURATION, SANITIZER, the *_URL templates, PLATFORM, etc.) are
# assumed to be defined elsewhere in the OSS-Fuzz build infra module.


def get_build_steps(project_dir):
  project_name = os.path.basename(project_dir)
  project_yaml = build_project.load_project_yaml(project_dir)
  if project_yaml['disabled']:
    skip_build('Project "%s" is disabled.' % project_name)

  fuzz_targets = get_targets_list(project_name)
  if not fuzz_targets:
    skip_build('No fuzz targets found for project "%s".' % project_name)

  dockerfile_path = os.path.join(project_dir, 'Dockerfile')
  name = project_yaml['name']
  image = project_yaml['image']
  report_date = datetime.datetime.now().strftime('%Y%m%d')

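  # Initial steps: check out OSS-Fuzz, build the project's Docker image, and
  # dump a srcmap of the checked-out sources to /workspace for later upload.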
  build_steps = [
      {
          'args': [
              'clone', 'https://github.com/google/oss-fuzz.git',
          ],
          'name': 'gcr.io/cloud-builders/git',
      },
      {
          'name': 'gcr.io/cloud-builders/docker',
          'args': [
              'build',
              '-t',
              image,
              '.',
          ],
          'dir': 'oss-fuzz/projects/' + name,
      },
      {
          'name':
              image,
          'args': [
              'bash', '-c',
              'srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json'
          ],
          'env': ['OSSFUZZ_REVISION=$REVISION_ID'],
      },
  ]

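  # Start from the shared CONFIGURATION and point OUT at the per-sanitizer
  # output directory in the build workspace.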
  env = CONFIGURATION[:]
  out = '/workspace/out/' + SANITIZER
  env.append('OUT=' + out)

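  # Use the WORKDIR declared in the project's Dockerfile, falling back to /src.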
  workdir = build_project.workdir_from_dockerfile(dockerfile_path)
  if not workdir:
    workdir = '/src'

  # Compilation step.
  build_steps.append(
      {
          'name': image,
          'env': env,
          'args': [
              'bash',
              '-c',
              # Remove /out to make sure there are no non-instrumented
              # binaries left over. `cd /src && cd {workdir}` (where
              # {workdir} is parsed from the Dockerfile) is needed because
              # Container Builder overrides our workdir, so this step sets
              # it back.
              ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
               'compile').format(workdir=workdir, out=out),
          ],
      }
  )

  # Split fuzz targets into batches of CORPUS_DOWNLOAD_BATCH_SIZE.
  for i in xrange(0, len(fuzz_targets), CORPUS_DOWNLOAD_BATCH_SIZE):
    download_corpus_args = []
    for binary_name in fuzz_targets[i:i + CORPUS_DOWNLOAD_BATCH_SIZE]:
      qualified_name = binary_name
      qualified_name_prefix = '%s_' % project_name
      if not binary_name.startswith(qualified_name_prefix):
        qualified_name = qualified_name_prefix + binary_name

      url = build_project.get_signed_url(
          CORPUS_BACKUP_URL.format(
              project=project_name, fuzzer=qualified_name),
          method='GET')

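      # Each download_corpus argument is "<local zip path> <signed URL>".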
      corpus_archive_path = os.path.join('/corpus', binary_name + '.zip')
      download_corpus_args.append('%s %s' % (corpus_archive_path, url))

    # Download corpus.
    build_steps.append(
        {
            'name': 'gcr.io/oss-fuzz-base/base-runner',
            'entrypoint': 'download_corpus',
            'args': download_corpus_args,
            'volumes': [{'name': 'corpus', 'path': '/corpus'}],
        }
    )

  # Unpack the corpus and run coverage script.
  build_steps.append(
      {
          'name': 'gcr.io/oss-fuzz-base/base-runner',
          'env': env + [
              'HTTP_PORT=',
              'COVERAGE_EXTRA_ARGS=%s' %
              project_yaml['coverage_extra_args'].strip(),
          ],
          'args': [
              'bash',
              '-c',
              'for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*}; done && coverage',
          ],
          'volumes': [{'name': 'corpus', 'path': '/corpus'}],
      }
  )

  # Upload the report.
  upload_report_url = UPLOAD_URL_FORMAT.format(
      project=project_name, type='reports', date=report_date)
  build_steps.append(
      {
          'name': 'gcr.io/cloud-builders/gsutil',
          'args': [
              '-m', 'cp', '-r',
              os.path.join(out, 'report'),
              upload_report_url,
          ],
      }
  )

  # Upload the fuzzer stats.
  upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(
      project=project_name, type='fuzzer_stats', date=report_date)
  build_steps.append(
      {
          'name': 'gcr.io/cloud-builders/gsutil',
          'args': [
              '-m', 'cp', '-r',
              os.path.join(out, 'fuzzer_stats'),
              upload_fuzzer_stats_url,
          ],
      }
  )

  # Upload the fuzzer logs.
  build_steps.append(
      {
          'name': 'gcr.io/cloud-builders/gsutil',
          'args': [
              '-m', 'cp', '-r',
              os.path.join(out, 'logs'),
              UPLOAD_URL_FORMAT.format(
                  project=project_name, type='logs', date=report_date),
          ],
      }
  )

  # Upload srcmap.
  srcmap_upload_url = UPLOAD_URL_FORMAT.format(
      project=project_name, type='srcmap', date=report_date)
  srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
  build_steps.append(
      {
          'name': 'gcr.io/cloud-builders/gsutil',
          'args': [
              'cp',
              '/workspace/srcmap.json',
              srcmap_upload_url,
          ],
      }
  )

  # Update the latest report information file for ClusterFuzz.
  latest_report_info_url = build_project.get_signed_url(
      LATEST_REPORT_INFO_URL.format(project=project_name),
      method='PUT',
      content_type='application/json')
  latest_report_info_body = json.dumps(
      {
          'fuzzer_stats_dir': upload_fuzzer_stats_url,
          'html_report_url': HTML_REPORT_URL_FORMAT.format(
              project=project_name, date=report_date, platform=PLATFORM),
          'report_date': report_date,
          'report_summary_path': os.path.join(
              upload_report_url, PLATFORM, 'summary.json'),
      }
  )

  build_steps.append(
      {
          'name': 'gcr.io/cloud-builders/curl',
          'args': [
              '-H', 'Content-Type: application/json',
              '-X', 'PUT',
              '-d', latest_report_info_body,
              latest_report_info_url,
          ],
      }
  )
  return build_steps
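
# Usage sketch (not part of the original module): the returned list is meant
# to be used as the `steps` field of a Cloud Build request submitted via the
# Cloud Build v1 API. The function name, project id, and timeout below are
# illustrative assumptions.
def run_coverage_build(project_dir, gcb_project_id='oss-fuzz'):
  import googleapiclient.discovery

  build_body = {
      'steps': get_build_steps(project_dir),
      'timeout': '10800s',  # assumed; long enough for a coverage build
  }
  cloudbuild = googleapiclient.discovery.build('cloudbuild', 'v1')
  cloudbuild.projects().builds().create(
      projectId=gcb_project_id, body=build_body).execute()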
def get_build_steps(project_dir):
    project_name = os.path.basename(project_dir)
    project_yaml = build_project.load_project_yaml(project_dir)
    if project_yaml['disabled']:
        skip_build('Project "%s" is disabled.' % project_name)

    fuzz_targets = get_targets_list(project_name)
    if not fuzz_targets:
        skip_build('No fuzz targets found for project "%s".' % project_name)

    dockerfile_path = os.path.join(project_dir, 'Dockerfile')
    name = project_yaml['name']
    image = project_yaml['image']
    report_date = datetime.datetime.now().strftime('%Y%m%d')

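    # Initial steps: check out OSS-Fuzz and build the project's Docker image.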
    build_steps = [
        {
            'args': [
                'clone',
                'https://github.com/google/oss-fuzz.git',
            ],
            'name': 'gcr.io/cloud-builders/git',
        },
        {
            'name': 'gcr.io/cloud-builders/docker',
            'args': [
                'build',
                '-t',
                image,
                '.',
            ],
            'dir': 'oss-fuzz/projects/' + name,
        },
    ]

    env = CONFIGURATION[:]
    out = '/workspace/out/' + SANITIZER
    env.append('OUT=' + out)

    workdir = build_project.workdir_from_dockerfile(dockerfile_path)
    if not workdir:
        workdir = '/src'

    # Compilation step.
    build_steps.append({
        'name':
        image,
        'env':
        env,
        'args': [
            'bash',
            '-c',
            # Remove /out to make sure there are no non-instrumented binaries
            # left over. `cd /src && cd {workdir}` (where {workdir} is parsed
            # from the Dockerfile) is needed because Container Builder
            # overrides our workdir, so this step sets it back.
            'rm -r /out && cd /src && cd {1} && mkdir -p {0} && compile'.
            format(out, workdir),
        ],
    })

    # Split fuzz targets into batches of CORPUS_DOWNLOAD_BATCH_SIZE.
    for i in xrange(0, len(fuzz_targets), CORPUS_DOWNLOAD_BATCH_SIZE):
        download_corpus_args = []
        for binary_name in fuzz_targets[i:i + CORPUS_DOWNLOAD_BATCH_SIZE]:
            qualified_name = binary_name
            qualified_name_prefix = '%s_' % project_name
            if not binary_name.startswith(qualified_name_prefix):
                qualified_name = qualified_name_prefix + binary_name

            url = build_project.get_signed_url(
                CORPUS_BACKUP_URL.format(project_name, qualified_name),
                method='GET')

            corpus_archive_path = os.path.join('/corpus', binary_name + '.zip')
            download_corpus_args.append('%s %s' % (corpus_archive_path, url))

        build_steps.append(
            # Download corpus.
            {
                'name': 'gcr.io/oss-fuzz-base/base-runner',
                'entrypoint': 'download_corpus',
                'args': download_corpus_args,
                'volumes': [{
                    'name': 'corpus',
                    'path': '/corpus'
                }],
            })

    build_steps.extend([
        # Unpack the corpus and run coverage script.
        {
            'name':
            'gcr.io/oss-fuzz-base/base-runner',
            'env':
            env + ['HTTP_PORT=', 'COVERAGE_EXTRA_ARGS='],
            'args': [
                'bash',
                '-c',
                'for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*}; done && coverage',
            ],
            'volumes': [{
                'name': 'corpus',
                'path': '/corpus'
            }],
        },
        # Upload the report.
        {
            'name':
            'gcr.io/cloud-builders/gsutil',
            'args': [
                '-m',
                'rsync',
                '-r',
                '-d',
                os.path.join(out, 'report'),
                UPLOAD_REPORT_URL_FORMAT.format(project_name, report_date),
            ],
        },
        # Upload the fuzzer stats.
        {
            'name':
            'gcr.io/cloud-builders/gsutil',
            'args': [
                '-m',
                'rsync',
                '-r',
                '-d',
                os.path.join(out, 'fuzzer_stats'),
                UPLOAD_FUZZER_STATS_URL_FORMAT.format(project_name,
                                                      report_date),
            ],
        },
    ])

    return build_steps, image
def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
                    image_project, base_images_project):
    """Returns build steps for project."""
    project_yaml = build_project.load_project_yaml(project_name,
                                                   project_yaml_file,
                                                   image_project)
    if project_yaml['disabled']:
        skip_build('Project "%s" is disabled.' % project_name)

    if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
        skip_build(('Project "{project_name}" is written in "{language}", '
                    'coverage is not supported yet.').format(
                        project_name=project_name,
                        language=project_yaml['language']))

    name = project_yaml['name']
    image = project_yaml['image']
    language = project_yaml['language']
    report_date = datetime.datetime.now().strftime('%Y%m%d')

    build_steps = build_lib.project_image_steps(name, image, language)

    env = CONFIGURATION[:]
    out = '/workspace/out/' + SANITIZER
    env.append('OUT=' + out)
    env.append('FUZZING_LANGUAGE=' + language)

    workdir = build_project.workdir_from_dockerfile(dockerfile_lines)
    if not workdir:
        workdir = '/src'

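    # Repro instructions echoed if the compile step below fails.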
    failure_msg = ('*' * 80 + '\nCoverage build failed.\nTo reproduce, run:\n'
                   'python infra/helper.py build_image {name}\n'
                   'python infra/helper.py build_fuzzers --sanitizer coverage '
                   '{name}\n' + '*' * 80).format(name=name)

    # Compilation step.
    build_steps.append({
        'name':
        image,
        'env':
        env,
        'args': [
            'bash',
            '-c',
            # Remove /out to make sure there are no non-instrumented binaries
            # left over. `cd /src && cd {workdir}` (where {workdir} is parsed
            # from the Dockerfile) is needed because Container Builder
            # overrides our workdir, so this step sets it back.
            ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
             'compile || (echo "{failure_msg}" && false)'
             ).format(workdir=workdir, out=out, failure_msg=failure_msg),
        ],
    })

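    # Add the corpus download steps; if build_lib cannot provide any, skip
    # the coverage build.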
    download_corpora_steps = build_lib.download_corpora_steps(project_name)
    if not download_corpora_steps:
        skip_build("Skipping code coverage build for %s.\n" % project_name)

    build_steps.extend(download_corpora_steps)

    failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
                   'To reproduce, run:\n'
                   'python infra/helper.py build_image {name}\n'
                   'python infra/helper.py build_fuzzers --sanitizer coverage '
                   '{name}\n'
                   'python infra/helper.py coverage {name}\n' +
                   '*' * 80).format(name=name)

    # Unpack the corpus and run coverage script.
    coverage_env = env + [
        'HTTP_PORT=',
        'COVERAGE_EXTRA_ARGS=%s' % project_yaml['coverage_extra_args'].strip(),
    ]
    if 'dataflow' in project_yaml['fuzzing_engines']:
        coverage_env.append('FULL_SUMMARY_PER_TARGET=1')

    build_steps.append({
        'name':
        'gcr.io/{0}/base-runner'.format(base_images_project),
        'env':
        coverage_env,
        'args': [
            'bash', '-c',
            ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
             'echo "Failed to unpack the corpus for $(basename ${f%%.*}). '
             'This usually means that corpus backup for a particular fuzz '
             'target does not exist. If a fuzz target was added in the last '
             '24 hours, please wait one more day. Otherwise, something is '
             'wrong with the fuzz target or the infrastructure, and corpus '
             'pruning task does not finish successfully." && exit 1'
             '); done && coverage || (echo "' + failure_msg + '" && false)')
        ],
        'volumes': [{
            'name': 'corpus',
            'path': '/corpus'
        }],
    })

    # Upload the report.
    upload_report_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                                 type='reports',
                                                 date=report_date)

    # Delete the existing report as gsutil cannot overwrite it in a sane way due
    # to the lack of `-T` option (it creates a subdir in the destination dir).
    build_steps.append(build_lib.gsutil_rm_rf_step(upload_report_url))
    build_steps.append({
        'name':
        'gcr.io/cloud-builders/gsutil',
        'args': [
            '-m',
            'cp',
            '-r',
            os.path.join(out, 'report'),
            upload_report_url,
        ],
    })

    # Upload the fuzzer stats. Delete the old ones just in case.
    upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                                       type='fuzzer_stats',
                                                       date=report_date)
    build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_stats_url))
    build_steps.append({
        'name':
        'gcr.io/cloud-builders/gsutil',
        'args': [
            '-m',
            'cp',
            '-r',
            os.path.join(out, 'fuzzer_stats'),
            upload_fuzzer_stats_url,
        ],
    })

    # Upload the fuzzer logs. Delete the old ones just in case.
    upload_fuzzer_logs_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                                      type='logs',
                                                      date=report_date)
    build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_logs_url))
    build_steps.append({
        'name':
        'gcr.io/cloud-builders/gsutil',
        'args': [
            '-m',
            'cp',
            '-r',
            os.path.join(out, 'logs'),
            upload_fuzzer_logs_url,
        ],
    })

    # Upload srcmap.
    srcmap_upload_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                                 type='srcmap',
                                                 date=report_date)
    srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
    build_steps.append({
        'name':
        'gcr.io/cloud-builders/gsutil',
        'args': [
            'cp',
            '/workspace/srcmap.json',
            srcmap_upload_url,
        ],
    })

    # Update the latest report information file for ClusterFuzz.
    latest_report_info_url = build_lib.get_signed_url(
        LATEST_REPORT_INFO_URL.format(project=project_name),
        content_type=LATEST_REPORT_INFO_CONTENT_TYPE)
    latest_report_info_body = json.dumps({
        'fuzzer_stats_dir':
        upload_fuzzer_stats_url,
        'html_report_url':
        HTML_REPORT_URL_FORMAT.format(project=project_name,
                                      date=report_date,
                                      platform=PLATFORM),
        'report_date':
        report_date,
        'report_summary_path':
        os.path.join(upload_report_url, PLATFORM, 'summary.json'),
    })

    build_steps.append(
        build_lib.http_upload_step(latest_report_info_body,
                                   latest_report_info_url,
                                   LATEST_REPORT_INFO_CONTENT_TYPE))
    return build_steps
def get_build_steps(project_dir):
  project_name = os.path.basename(project_dir)
  project_yaml = build_project.load_project_yaml(project_dir)
  if project_yaml['disabled']:
    skip_build('Project "%s" is disabled.' % project_name)

  if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
    skip_build(('Project "{project_name}" is written in "{language}", '
                'coverage is not supported yet.').format(
                    project_name=project_name,
                    language=project_yaml['language']))

  dockerfile_path = os.path.join(project_dir, 'Dockerfile')
  name = project_yaml['name']
  image = project_yaml['image']
  report_date = datetime.datetime.now().strftime('%Y%m%d')

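  # Initial steps: clone OSS-Fuzz, build the project image, and record a
  # srcmap of the checked-out sources.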
  build_steps = [
      {
          'args': [
              'clone',
              'https://github.com/google/oss-fuzz.git',
          ],
          'name': 'gcr.io/cloud-builders/git',
      },
      {
          'name': 'gcr.io/cloud-builders/docker',
          'args': [
              'build',
              '-t',
              image,
              '.',
          ],
          'dir': 'oss-fuzz/projects/' + name,
      },
      {
          'name': image,
          'args': [
              'bash', '-c',
              'srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json'
          ],
          'env': ['OSSFUZZ_REVISION=$REVISION_ID'],
      },
  ]

  env = CONFIGURATION[:]
  out = '/workspace/out/' + SANITIZER
  env.append('OUT=' + out)

  workdir = build_project.workdir_from_dockerfile(dockerfile_path)
  if not workdir:
    workdir = '/src'

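  # Instructions echoed when the compile step below fails, to help reproduce
  # the failure locally.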
  failure_msg = ('*' * 80 + '\nCoverage build failed.\nTo reproduce, run:\n'
                 'python infra/helper.py build_image {name}\n'
                 'python infra/helper.py build_fuzzers --sanitizer coverage '
                 '{name}\n' + '*' * 80).format(name=name)

  # Compilation step.
  build_steps.append({
      'name':
          image,
      'env':
          env,
      'args': [
          'bash',
          '-c',
          # Remove /out to make sure there are no non-instrumented binaries
          # left over. `cd /src && cd {workdir}` (where {workdir} is parsed
          # from the Dockerfile) is needed because Container Builder
          # overrides our workdir, so this step sets it back.
          ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
           'compile || (echo "{failure_msg}" && false)'
          ).format(workdir=workdir, out=out, failure_msg=failure_msg),
      ],
  })

  download_corpora_step = build_lib.download_corpora_step(project_name)
  if not download_corpora_step:
    skip_build("Skipping code coverage build for %s.\n" % project_name)

  build_steps.append(download_corpora_step)

  failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
                 'To reproduce, run:\n'
                 'python infra/helper.py build_image {name}\n'
                 'python infra/helper.py build_fuzzers --sanitizer coverage '
                 '{name}\n'
                 'python infra/helper.py coverage {name}\n' +
                 '*' * 80).format(name=name)

  # Unpack the corpus and run coverage script.
  build_steps.append({
      'name':
          'gcr.io/oss-fuzz-base/base-runner',
      'env':
          env + [
              'HTTP_PORT=',
              'COVERAGE_EXTRA_ARGS=%s' %
              project_yaml['coverage_extra_args'].strip()
          ],
      'args': [
          'bash', '-c',
          ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
           'echo "Failed to unpack the corpus for $(basename ${f%%.*}). '
           'This usually means that corpus backup for a particular fuzz '
           'target does not exist. If a fuzz target was added in the last '
           '24 hours, please wait one more day. Otherwise, something is '
           'wrong with the fuzz target or the infrastructure, and corpus '
           'pruning task does not finish successfully." && exit 1'
           '); done && coverage || (echo "' + failure_msg + '" && false)')
      ],
      'volumes': [{
          'name': 'corpus',
          'path': '/corpus'
      }],
  })

  # Upload the report.
  upload_report_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                               type='reports',
                                               date=report_date)
  build_steps.append({
      'name':
          'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(out, 'report'),
          upload_report_url,
      ],
  })

  # Upload the fuzzer stats.
  upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                                     type='fuzzer_stats',
                                                     date=report_date)
  build_steps.append({
      'name':
          'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(out, 'fuzzer_stats'),
          upload_fuzzer_stats_url,
      ],
  })

  # Upload the fuzzer logs.
  build_steps.append({
      'name':
          'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(out, 'logs'),
          UPLOAD_URL_FORMAT.format(project=project_name,
                                   type='logs',
                                   date=report_date),
      ],
  })

  # Upload srcmap.
  srcmap_upload_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                               type='srcmap',
                                               date=report_date)
  srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          'cp',
          '/workspace/srcmap.json',
          srcmap_upload_url,
      ],
  })

  # Update the latest report information file for ClusterFuzz.
  latest_report_info_url = build_lib.get_signed_url(
      LATEST_REPORT_INFO_URL.format(project=project_name),
      method='PUT',
      content_type='application/json')
  latest_report_info_body = json.dumps({
      'fuzzer_stats_dir':
          upload_fuzzer_stats_url,
      'html_report_url':
          HTML_REPORT_URL_FORMAT.format(project=project_name,
                                        date=report_date,
                                        platform=PLATFORM),
      'report_date':
          report_date,
      'report_summary_path':
          os.path.join(upload_report_url, PLATFORM, 'summary.json'),
  })

  build_steps.append({
      'name':
          'gcr.io/cloud-builders/curl',
      'args': [
          '-H',
          'Content-Type: application/json',
          '-X',
          'PUT',
          '-d',
          latest_report_info_body,
          latest_report_info_url,
      ],
  })
  return build_steps