Code example #1
0
File: build_project.py  Project: zyantific/oss-fuzz
def dataflow_post_build_steps(project_name, env, base_images_project, testing,
                              test_image_suffix):
    """Return corpora-download steps plus a DFT-collection step.

    Returns None when no corpora are available for the project (in which
    case there is nothing to run dataflow tracing against).
    """
    steps = build_lib.download_corpora_steps(project_name, testing)
    if not steps:
        return None

    # Tuning knobs for the collect_dft run, appended to the caller's env.
    dft_env = env + [
        'COLLECT_DFT_TIMEOUT=2h',
        'DFT_FILE_SIZE_LIMIT=65535',
        'DFT_MIN_TIMEOUT=2.0',
        'DFT_TIMEOUT_RANGE=6.0',
    ]
    # Unpack every downloaded corpus zip into a directory named after the
    # zip (sans extension), then run collect_dft; fail the step loudly.
    dft_command = ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*}; done && '
                   'collect_dft || (echo "DFT collection failed." && false)')

    steps.append({
        'name': get_runner_image_name(base_images_project, test_image_suffix),
        'env': dft_env,
        'args': ['bash', '-c', dft_command],
        # Shared volume where the corpora steps deposited the zips.
        'volumes': [{'name': 'corpus', 'path': '/corpus'}],
    })
    return steps
Code example #2
0
def dataflow_post_build_steps(project_name, env):
    """Return corpora-download steps followed by a DFT-collection step.

    Returns None when the project has no corpora to download.
    """
    steps = build_lib.download_corpora_steps(project_name)
    if not steps:
        return None

    # Environment for collect_dft: caller's env plus DFT tuning knobs.
    runner_env = env + [
        'COLLECT_DFT_TIMEOUT=2h',
        'DFT_FILE_SIZE_LIMIT=65535',
        'DFT_MIN_TIMEOUT=2.0',
        'DFT_TIMEOUT_RANGE=6.0',
    ]
    # Unzip each corpus archive next to itself, then run collect_dft,
    # forcing a non-zero exit on failure.
    shell_command = ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*}; done && '
                     'collect_dft || (echo "DFT collection failed." && false)')

    steps.append({
        'name': 'gcr.io/oss-fuzz-base/base-runner',
        'env': runner_env,
        'args': ['bash', '-c', shell_command],
        # Volume shared with the corpora-download steps.
        'volumes': [{'name': 'corpus', 'path': '/corpus'}],
    })
    return steps
Code example #3
0
File: build_project.py  Project: mwwolters/oss-fuzz
def dataflow_post_build_steps(project_name, env):
  """Return the corpora-download steps with a DFT-collection step appended.

  Returns None if no corpora are available for the project.
  """
  steps = build_lib.download_corpora_steps(project_name)
  if not steps:
    return None

  # Unpack every corpus zip in /corpus, then run collect_dft; echo a
  # message and fail the build step if collection does not succeed.
  collect_cmd = ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*}; done && '
                 'collect_dft || (echo "DFT collection failed." && false)')
  collect_step = {
      'name': 'gcr.io/oss-fuzz-base/base-runner',
      'env': env,
      'args': ['bash', '-c', collect_cmd],
      # Shared volume populated by the download steps above.
      'volumes': [{'name': 'corpus', 'path': '/corpus'}],
  }
  steps.append(collect_step)
  return steps
Code example #4
0
def get_build_steps(  # pylint: disable=too-many-locals, too-many-arguments
        project_name, project_yaml, dockerfile_lines, image_project,
        base_images_project, config):
    """Returns Cloud Build steps for a project's code coverage build.

    The pipeline: build the project image, compile with the coverage
    configuration, download fuzzing corpora, generate the coverage report,
    upload reports/stats/logs/srcmap to GCS, and finally publish the
    latest-report info file consumed by ClusterFuzz.

    Returns an empty list when the project is disabled, its language has no
    coverage support, or no corpora could be downloaded.
    """
    project = build_project.Project(project_name, project_yaml,
                                    dockerfile_lines, image_project)
    if project.disabled:
        logging.info('Project "%s" is disabled.', project.name)
        return []

    # Coverage reports can only be generated for supported languages.
    if project.fuzzing_language not in LANGUAGES_WITH_COVERAGE_SUPPORT:
        logging.info(
            'Project "%s" is written in "%s", coverage is not supported yet.',
            project.name, project.fuzzing_language)
        return []

    # Reports are bucketed per day (YYYYMMDD).
    report_date = build_project.get_datetime_now().strftime('%Y%m%d')
    bucket = CoverageBucket(project.name, report_date, PLATFORM,
                            config.testing)

    build_steps = build_lib.project_image_steps(
        project.name,
        project.image,
        project.fuzzing_language,
        branch=config.branch,
        test_image_suffix=config.test_image_suffix)

    # Compile the project with the coverage "sanitizer" configuration.
    build = build_project.Build(FUZZING_ENGINE, 'coverage', ARCHITECTURE)
    env = build_project.get_env(project.fuzzing_language, build)
    build_steps.append(
        build_project.get_compile_step(project, build, env, config.parallel))
    download_corpora_steps = build_lib.download_corpora_steps(
        project.name, testing=config.testing)
    if not download_corpora_steps:
        # Without corpora there is nothing to measure coverage against.
        logging.info('Skipping code coverage build for %s.', project.name)
        return []

    build_steps.extend(download_corpora_steps)

    # Human-readable reproduction instructions shown when report
    # generation fails inside the build step.
    failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
                   'To reproduce, run:\n'
                   f'python infra/helper.py build_image {project.name}\n'
                   'python infra/helper.py build_fuzzers --sanitizer coverage '
                   f'{project.name}\n'
                   f'python infra/helper.py coverage {project.name}\n' +
                   '*' * 80)

    # Unpack the corpus and run coverage script.
    coverage_env = env + [
        'HTTP_PORT=',
        f'COVERAGE_EXTRA_ARGS={project.coverage_extra_args.strip()}',
    ]
    if 'dataflow' in project.fuzzing_engines:
        # Dataflow projects need per-target summaries in the report.
        coverage_env.append('FULL_SUMMARY_PER_TARGET=1')

    build_steps.append({
        'name':
        build_project.get_runner_image_name(base_images_project,
                                            config.test_image_suffix),
        'env':
        coverage_env,
        'args': [
            'bash', '-c',
            # Unzip each corpus; a missing/corrupt zip usually means the
            # corpus backup doesn't exist yet, so fail with an explanation.
            ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
             'echo "Failed to unpack the corpus for $(basename ${f%%.*}). '
             'This usually means that corpus backup for a particular fuzz '
             'target does not exist. If a fuzz target was added in the last '
             '24 hours, please wait one more day. Otherwise, something is '
             'wrong with the fuzz target or the infrastructure, and corpus '
             'pruning task does not finish successfully." && exit 1'
             '); done && coverage || (echo "' + failure_msg + '" && false)')
        ],
        'volumes': [{
            'name': 'corpus',
            'path': '/corpus'
        }],
    })

    # Upload the report.
    upload_report_url = bucket.get_upload_url('reports')
    upload_report_by_target_url = bucket.get_upload_url('reports-by-target')

    # Delete the existing report as gsutil cannot overwrite it in a useful way due
    # to the lack of `-T` option (it creates a subdir in the destination dir).
    build_steps.append(build_lib.gsutil_rm_rf_step(upload_report_url))
    build_steps.append({
        'name':
        'gcr.io/cloud-builders/gsutil',
        'args': [
            '-m',  # parallel (multi-threaded) copy
            'cp',
            '-r',
            os.path.join(build.out, 'report'),
            upload_report_url,
        ],
    })

    # Per-target reports are only produced for introspector-capable languages.
    if project.fuzzing_language in LANGUAGES_WITH_INTROSPECTOR_SUPPORT:
        build_steps.append(
            build_lib.gsutil_rm_rf_step(upload_report_by_target_url))
        build_steps.append({
            'name':
            'gcr.io/cloud-builders/gsutil',
            'args': [
                '-m',
                'cp',
                '-r',
                os.path.join(build.out, 'report_target'),
                upload_report_by_target_url,
            ],
        })

    # Upload the fuzzer stats. Delete the old ones just in case.
    upload_fuzzer_stats_url = bucket.get_upload_url('fuzzer_stats')

    build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_stats_url))
    build_steps.append({
        'name':
        'gcr.io/cloud-builders/gsutil',
        'args': [
            '-m',
            'cp',
            '-r',
            os.path.join(build.out, 'fuzzer_stats'),
            upload_fuzzer_stats_url,
        ],
    })

    if project.fuzzing_language in LANGUAGES_WITH_INTROSPECTOR_SUPPORT:
        # Upload the text coverage reports. Delete the old ones just in case.
        upload_textcov_reports_url = bucket.get_upload_url('textcov_reports')

        build_steps.append(
            build_lib.gsutil_rm_rf_step(upload_textcov_reports_url))
        build_steps.append({
            'name':
            'gcr.io/cloud-builders/gsutil',
            'args': [
                '-m',
                'cp',
                '-r',
                os.path.join(build.out, 'textcov_reports'),
                upload_textcov_reports_url,
            ],
        })

    # Upload the fuzzer logs. Delete the old ones just in case
    upload_fuzzer_logs_url = bucket.get_upload_url('logs')
    build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_logs_url))
    build_steps.append({
        'name':
        'gcr.io/cloud-builders/gsutil',
        'args': [
            '-m',
            'cp',
            '-r',
            os.path.join(build.out, 'logs'),
            upload_fuzzer_logs_url,
        ],
    })

    # Upload srcmap.
    srcmap_upload_url = bucket.get_upload_url('srcmap')
    # srcmap is a single JSON file, not a directory, so fix up the URL.
    srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
    build_steps.append({
        'name':
        'gcr.io/cloud-builders/gsutil',
        'args': [
            'cp',
            '/workspace/srcmap.json',
            srcmap_upload_url,
        ],
    })

    # Update the latest report information file for ClusterFuzz.
    latest_report_info_url = build_lib.get_signed_url(
        bucket.latest_report_info_url,
        content_type=LATEST_REPORT_INFO_CONTENT_TYPE)
    latest_report_info_body = json.dumps({
        'fuzzer_stats_dir':
        upload_fuzzer_stats_url,
        'html_report_url':
        posixpath.join(bucket.html_report_url, 'index.html'),
        'report_date':
        report_date,
        'report_summary_path':
        os.path.join(upload_report_url, PLATFORM, 'summary.json'),
    })

    build_steps.append(
        build_lib.http_upload_step(latest_report_info_body,
                                   latest_report_info_url,
                                   LATEST_REPORT_INFO_CONTENT_TYPE))

    return build_steps
Code example #5
0
def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
                    image_project, base_images_project):
    """Returns Cloud Build steps for a project's code coverage build.

    Legacy variant that reads settings from project.yaml directly. Builds
    the project with the coverage configuration, downloads corpora, runs
    the coverage script, uploads report/stats/logs/srcmap to GCS, and
    publishes the latest-report info file for ClusterFuzz.

    Note: skip_build() is used for early exits (disabled project,
    unsupported language, no corpora).
    """
    project_yaml = build_project.load_project_yaml(project_name,
                                                   project_yaml_file,
                                                   image_project)
    if project_yaml['disabled']:
        skip_build('Project "%s" is disabled.' % project_name)

    # Coverage reports can only be generated for supported languages.
    if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
        skip_build(('Project "{project_name}" is written in "{language}", '
                    'coverage is not supported yet.').format(
                        project_name=project_name,
                        language=project_yaml['language']))

    name = project_yaml['name']
    image = project_yaml['image']
    language = project_yaml['language']
    # Reports are bucketed per day (YYYYMMDD).
    report_date = datetime.datetime.now().strftime('%Y%m%d')

    build_steps = build_lib.project_image_steps(name, image, language)

    # Build environment: base configuration plus output dir and language.
    env = CONFIGURATION[:]
    out = '/workspace/out/' + SANITIZER
    env.append('OUT=' + out)
    env.append('FUZZING_LANGUAGE=' + language)

    workdir = build_project.workdir_from_dockerfile(dockerfile_lines)
    if not workdir:
        # Fall back to the conventional source root.
        workdir = '/src'

    # Reproduction instructions shown if the compile step fails.
    failure_msg = ('*' * 80 + '\nCoverage build failed.\nTo reproduce, run:\n'
                   'python infra/helper.py build_image {name}\n'
                   'python infra/helper.py build_fuzzers --sanitizer coverage '
                   '{name}\n' + '*' * 80).format(name=name)

    # Compilation step.
    build_steps.append({
        'name':
        image,
        'env':
        env,
        'args': [
            'bash',
            '-c',
            # Remove /out to make sure there are non instrumented binaries.
            # `cd /src && cd {workdir}` (where {workdir} is parsed from the
            # Dockerfile). Container Builder overrides our workdir so we need
            # to add this step to set it back.
            ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
             'compile || (echo "{failure_msg}" && false)'
             ).format(workdir=workdir, out=out, failure_msg=failure_msg),
        ],
    })

    download_corpora_steps = build_lib.download_corpora_steps(project_name)
    if not download_corpora_steps:
        # Without corpora there is nothing to measure coverage against.
        skip_build("Skipping code coverage build for %s.\n" % project_name)

    build_steps.extend(download_corpora_steps)

    # Reproduction instructions shown if report generation fails.
    failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
                   'To reproduce, run:\n'
                   'python infra/helper.py build_image {name}\n'
                   'python infra/helper.py build_fuzzers --sanitizer coverage '
                   '{name}\n'
                   'python infra/helper.py coverage {name}\n' +
                   '*' * 80).format(name=name)

    # Unpack the corpus and run coverage script.
    coverage_env = env + [
        'HTTP_PORT=',
        'COVERAGE_EXTRA_ARGS=%s' % project_yaml['coverage_extra_args'].strip(),
    ]
    if 'dataflow' in project_yaml['fuzzing_engines']:
        # Dataflow projects need per-target summaries in the report.
        coverage_env.append('FULL_SUMMARY_PER_TARGET=1')

    build_steps.append({
        'name':
        'gcr.io/{0}/base-runner'.format(base_images_project),
        'env':
        coverage_env,
        'args': [
            'bash', '-c',
            # Unzip each corpus; a missing/corrupt zip usually means the
            # corpus backup doesn't exist yet, so fail with an explanation.
            ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
             'echo "Failed to unpack the corpus for $(basename ${f%%.*}). '
             'This usually means that corpus backup for a particular fuzz '
             'target does not exist. If a fuzz target was added in the last '
             '24 hours, please wait one more day. Otherwise, something is '
             'wrong with the fuzz target or the infrastructure, and corpus '
             'pruning task does not finish successfully." && exit 1'
             '); done && coverage || (echo "' + failure_msg + '" && false)')
        ],
        'volumes': [{
            'name': 'corpus',
            'path': '/corpus'
        }],
    })

    # Upload the report.
    upload_report_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                                 type='reports',
                                                 date=report_date)

    # Delete the existing report as gsutil cannot overwrite it in a sane way due
    # to the lack of `-T` option (it creates a subdir in the destination dir).
    build_steps.append(build_lib.gsutil_rm_rf_step(upload_report_url))
    build_steps.append({
        'name':
        'gcr.io/cloud-builders/gsutil',
        'args': [
            '-m',  # parallel (multi-threaded) copy
            'cp',
            '-r',
            os.path.join(out, 'report'),
            upload_report_url,
        ],
    })

    # Upload the fuzzer stats. Delete the old ones just in case.
    upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                                       type='fuzzer_stats',
                                                       date=report_date)
    build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_stats_url))
    build_steps.append({
        'name':
        'gcr.io/cloud-builders/gsutil',
        'args': [
            '-m',
            'cp',
            '-r',
            os.path.join(out, 'fuzzer_stats'),
            upload_fuzzer_stats_url,
        ],
    })

    # Upload the fuzzer logs. Delete the old ones just in case
    upload_fuzzer_logs_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                                      type='logs',
                                                      date=report_date)
    build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_logs_url))
    build_steps.append({
        'name':
        'gcr.io/cloud-builders/gsutil',
        'args': [
            '-m',
            'cp',
            '-r',
            os.path.join(out, 'logs'),
            upload_fuzzer_logs_url,
        ],
    })

    # Upload srcmap.
    srcmap_upload_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                                 type='srcmap',
                                                 date=report_date)
    # srcmap is a single JSON file, not a directory, so fix up the URL.
    srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
    build_steps.append({
        'name':
        'gcr.io/cloud-builders/gsutil',
        'args': [
            'cp',
            '/workspace/srcmap.json',
            srcmap_upload_url,
        ],
    })

    # Update the latest report information file for ClusterFuzz.
    latest_report_info_url = build_lib.get_signed_url(
        LATEST_REPORT_INFO_URL.format(project=project_name),
        content_type=LATEST_REPORT_INFO_CONTENT_TYPE)
    latest_report_info_body = json.dumps({
        'fuzzer_stats_dir':
        upload_fuzzer_stats_url,
        'html_report_url':
        HTML_REPORT_URL_FORMAT.format(project=project_name,
                                      date=report_date,
                                      platform=PLATFORM),
        'report_date':
        report_date,
        'report_summary_path':
        os.path.join(upload_report_url, PLATFORM, 'summary.json'),
    })

    build_steps.append(
        build_lib.http_upload_step(latest_report_info_body,
                                   latest_report_info_url,
                                   LATEST_REPORT_INFO_CONTENT_TYPE))
    return build_steps