def get_upload_steps(project, build, timestamp, base_images_project, testing):
  """Returns the Cloud Build steps that upload the fuzzer build for |project|.

  The artifacts for |build| are stamped with |timestamp|;
  |base_images_project| selects the uploader image and |testing| selects the
  destination bucket.
  """
  bucket = build_lib.get_upload_bucket(build.fuzzing_engine, build.architecture,
                                       testing)
  stamped = '-'.join([project.name, build.sanitizer, timestamp])
  zip_name = stamped + '.zip'
  srcmap_name = stamped + '.srcmap.json'

  # Signed PUT URLs for the zipped binaries and the srcmap.
  binaries_url = build_lib.get_signed_url(
      build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, project.name, zip_name))
  srcmap_url = build_lib.get_signed_url(
      build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, project.name,
                                             srcmap_name))

  # Signed URL for the per-sanitizer latest.version marker file.
  version_file = '-'.join(
      [project.name, build.sanitizer, LATEST_VERSION_FILENAME])
  version_url = build_lib.get_signed_url(
      build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, project.name,
                                             version_file),
      content_type=LATEST_VERSION_CONTENT_TYPE)

  uploader_image = get_uploader_image(base_images_project)

  return [
      # Zip binaries.
      {
          'name': project.image,
          'args': ['bash', '-c', f'cd {build.out} && zip -r {zip_name} *'],
      },
      # Upload srcmap.
      {
          'name': uploader_image,
          'args': [
              '/workspace/srcmap.json',
              srcmap_url,
          ],
      },
      # Upload binaries.
      {
          'name': uploader_image,
          'args': [
              os.path.join(build.out, zip_name),
              binaries_url,
          ],
      },
      # Upload targets list.
      get_targets_list_upload_step(bucket, project, build, uploader_image),
      # Upload the latest.version file (its content is the zip file name).
      build_lib.http_upload_step(zip_name, version_url,
                                 LATEST_VERSION_CONTENT_TYPE),
      # Cleanup.
      get_cleanup_step(project, build),
  ]
def get_targets_list_upload_step(bucket, project, build, uploader_image):
  """Returns a build step that uploads the targets list for |build| of
  |project| to |bucket| using |uploader_image|."""
  signed_url = build_lib.get_signed_url(
      build_lib.get_targets_list_url(bucket, project.name, build.sanitizer))
  step = {
      'name': uploader_image,
      'args': [
          f'/workspace/{build.targets_list_filename}',
          signed_url,
      ],
  }
  return step
def get_build_steps(  # pylint: disable=too-many-locals, too-many-arguments
    project_name, project_yaml, dockerfile_lines, image_project,
    base_images_project, config):
  """Returns the Cloud Build steps that build a coverage report for
  |project_name| and upload it (reports, stats, logs, srcmap, latest-report
  marker) to the project's coverage bucket.

  Returns an empty list when the project is disabled, its language has no
  coverage support, or it has no corpus backups to download.
  """
  project = build_project.Project(project_name, project_yaml, dockerfile_lines,
                                  image_project)
  if project.disabled:
    logging.info('Project "%s" is disabled.', project.name)
    return []

  if project.fuzzing_language not in LANGUAGES_WITH_COVERAGE_SUPPORT:
    logging.info(
        'Project "%s" is written in "%s", coverage is not supported yet.',
        project.name, project.fuzzing_language)
    return []

  report_date = build_project.get_datetime_now().strftime('%Y%m%d')
  bucket = CoverageBucket(project.name, report_date, PLATFORM, config.testing)

  build_steps = build_lib.project_image_steps(
      project.name,
      project.image,
      project.fuzzing_language,
      branch=config.branch,
      test_image_suffix=config.test_image_suffix)

  build = build_project.Build(FUZZING_ENGINE, 'coverage', ARCHITECTURE)
  env = build_project.get_env(project.fuzzing_language, build)
  build_steps.append(
      build_project.get_compile_step(project, build, env, config.parallel))

  download_corpora_steps = build_lib.download_corpora_steps(
      project.name, testing=config.testing)
  if not download_corpora_steps:
    logging.info('Skipping code coverage build for %s.', project.name)
    return []

  build_steps.extend(download_corpora_steps)

  failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
                 'To reproduce, run:\n'
                 f'python infra/helper.py build_image {project.name}\n'
                 'python infra/helper.py build_fuzzers --sanitizer coverage '
                 f'{project.name}\n'
                 f'python infra/helper.py coverage {project.name}\n' +
                 '*' * 80)

  # Unpack the corpus and run coverage script.
  coverage_env = env + [
      'HTTP_PORT=',
      f'COVERAGE_EXTRA_ARGS={project.coverage_extra_args.strip()}',
  ]
  if 'dataflow' in project.fuzzing_engines:
    coverage_env.append('FULL_SUMMARY_PER_TARGET=1')

  build_steps.append({
      'name':
          build_project.get_runner_image_name(base_images_project,
                                              config.test_image_suffix),
      'env': coverage_env,
      'args': [
          'bash', '-c',
          ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
           'echo "Failed to unpack the corpus for $(basename ${f%%.*}). '
           'This usually means that corpus backup for a particular fuzz '
           'target does not exist. If a fuzz target was added in the last '
           '24 hours, please wait one more day. Otherwise, something is '
           'wrong with the fuzz target or the infrastructure, and corpus '
           'pruning task does not finish successfully." && exit 1'
           '); done && coverage || (echo "' + failure_msg + '" && false)')
      ],
      'volumes': [{
          'name': 'corpus',
          'path': '/corpus'
      }],
  })

  # Upload the report.
  upload_report_url = bucket.get_upload_url('reports')
  upload_report_by_target_url = bucket.get_upload_url('reports-by-target')
  # Delete the existing report as gsutil cannot overwrite it in a useful way
  # due to the lack of `-T` option (it creates a subdir in the destination
  # dir).
  build_steps.append(build_lib.gsutil_rm_rf_step(upload_report_url))
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(build.out, 'report'),
          upload_report_url,
      ],
  })

  # Per-target reports are only produced for introspector-capable languages.
  if project.fuzzing_language in LANGUAGES_WITH_INTROSPECTOR_SUPPORT:
    build_steps.append(
        build_lib.gsutil_rm_rf_step(upload_report_by_target_url))
    build_steps.append({
        'name': 'gcr.io/cloud-builders/gsutil',
        'args': [
            '-m',
            'cp',
            '-r',
            os.path.join(build.out, 'report_target'),
            upload_report_by_target_url,
        ],
    })

  # Upload the fuzzer stats. Delete the old ones just in case.
  upload_fuzzer_stats_url = bucket.get_upload_url('fuzzer_stats')
  build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_stats_url))
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(build.out, 'fuzzer_stats'),
          upload_fuzzer_stats_url,
      ],
  })

  if project.fuzzing_language in LANGUAGES_WITH_INTROSPECTOR_SUPPORT:
    # Upload the text coverage reports. Delete the old ones just in case.
    upload_textcov_reports_url = bucket.get_upload_url('textcov_reports')
    build_steps.append(
        build_lib.gsutil_rm_rf_step(upload_textcov_reports_url))
    build_steps.append({
        'name': 'gcr.io/cloud-builders/gsutil',
        'args': [
            '-m',
            'cp',
            '-r',
            os.path.join(build.out, 'textcov_reports'),
            upload_textcov_reports_url,
        ],
    })

  # Upload the fuzzer logs. Delete the old ones just in case.
  upload_fuzzer_logs_url = bucket.get_upload_url('logs')
  build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_logs_url))
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(build.out, 'logs'),
          upload_fuzzer_logs_url,
      ],
  })

  # Upload srcmap. The srcmap destination is a single .json object, not a
  # directory, hence the rstrip + suffix.
  srcmap_upload_url = bucket.get_upload_url('srcmap')
  srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          'cp',
          '/workspace/srcmap.json',
          srcmap_upload_url,
      ],
  })

  # Update the latest report information file for ClusterFuzz.
  latest_report_info_url = build_lib.get_signed_url(
      bucket.latest_report_info_url,
      content_type=LATEST_REPORT_INFO_CONTENT_TYPE)
  latest_report_info_body = json.dumps({
      'fuzzer_stats_dir':
          upload_fuzzer_stats_url,
      'html_report_url':
          posixpath.join(bucket.html_report_url, 'index.html'),
      'report_date':
          report_date,
      'report_summary_path':
          os.path.join(upload_report_url, PLATFORM, 'summary.json'),
  })
  build_steps.append(
      build_lib.http_upload_step(latest_report_info_body,
                                 latest_report_info_url,
                                 LATEST_REPORT_INFO_CONTENT_TYPE))
  return build_steps
def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
                    image_project, base_images_project):
  """Returns the Cloud Build steps that build, check and upload the fuzzer
  binaries for every supported (engine, sanitizer, architecture) combination
  of |project_name|."""
  project_yaml = load_project_yaml(project_name, project_yaml_file,
                                   image_project)

  name = project_yaml['name']
  image = project_yaml['image']
  language = project_yaml['language']
  run_tests = project_yaml['run_tests']
  # Timestamp shared by all artifacts of this build invocation.
  time_stamp = datetime.datetime.now().strftime('%Y%m%d%H%M')

  build_steps = build_lib.project_image_steps(name, image, language)

  # Copy over MSan instrumented libraries.
  build_steps.append({
      'name': 'gcr.io/{0}/msan-builder'.format(base_images_project),
      'args': [
          'bash',
          '-c',
          'cp -r /msan /workspace',
      ],
  })

  for fuzzing_engine in project_yaml['fuzzing_engines']:
    for sanitizer in get_sanitizers(project_yaml):
      for architecture in project_yaml['architectures']:
        if not is_supported_configuration(fuzzing_engine, sanitizer,
                                          architecture):
          continue

        # Fresh env per configuration: engine config + sanitizer config,
        # then OUT/ARCHITECTURE/etc. appended below.
        env = CONFIGURATIONS['engine-' + fuzzing_engine][:]
        env.extend(CONFIGURATIONS['sanitizer-' + sanitizer])
        out = '/workspace/out/' + sanitizer
        stamped_name = '-'.join([name, sanitizer, time_stamp])
        latest_version_file = '-'.join(
            [name, sanitizer, LATEST_VERSION_FILENAME])
        zip_file = stamped_name + '.zip'
        stamped_srcmap_file = stamped_name + '.srcmap.json'
        # Non-x86_64 builds go to an architecture-suffixed bucket.
        bucket = build_lib.ENGINE_INFO[fuzzing_engine].upload_bucket
        if architecture != 'x86_64':
          bucket += '-' + architecture

        upload_url = build_lib.get_signed_url(
            build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name, zip_file))
        srcmap_url = build_lib.get_signed_url(
            build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name,
                                                   stamped_srcmap_file))
        latest_version_url = build_lib.GCS_UPLOAD_URL_FORMAT.format(
            bucket, name, latest_version_file)
        latest_version_url = build_lib.get_signed_url(
            latest_version_url, content_type=LATEST_VERSION_CONTENT_TYPE)

        targets_list_filename = build_lib.get_targets_list_filename(sanitizer)
        targets_list_url = build_lib.get_signed_url(
            build_lib.get_targets_list_url(bucket, name, sanitizer))

        env.append('OUT=' + out)
        env.append('MSAN_LIBS_PATH=/workspace/msan')
        env.append('ARCHITECTURE=' + architecture)
        env.append('FUZZING_LANGUAGE=' + language)

        workdir = workdir_from_dockerfile(dockerfile_lines)
        if not workdir:
          workdir = '/src'

        failure_msg = ('*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
                       'python infra/helper.py build_image {name}\n'
                       'python infra/helper.py build_fuzzers --sanitizer '
                       '{sanitizer} --engine {engine} --architecture '
                       '{architecture} {name}\n' + '*' * 80).format(
                           name=name,
                           sanitizer=sanitizer,
                           engine=fuzzing_engine,
                           architecture=architecture)

        build_steps.append(
            # compile
            {
                'name': image,
                'env': env,
                'args': [
                    'bash',
                    '-c',
                    # Remove /out to break loudly when a build script
                    # incorrectly uses /out instead of $OUT.
                    # `cd /src && cd {workdir}` (where {workdir} is parsed
                    # from the Dockerfile). Container Builder overrides our
                    # workdir so we need to add this step to set it back.
                    ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} '
                     '&& compile || (echo "{failure_msg}" && false)').format(
                         workdir=workdir, out=out, failure_msg=failure_msg),
                ],
            })

        if sanitizer == 'memory':
          # Patch dynamic libraries to use instrumented ones.
          build_steps.append({
              'name':
                  'gcr.io/{0}/msan-builder'.format(base_images_project),
              'args': [
                  'bash',
                  '-c',
                  # TODO(ochang): Replace with just patch_build.py once
                  # permission in image is fixed.
                  'python /usr/local/bin/patch_build.py {0}'.format(out),
              ],
          })

        if run_tests:
          # failure_msg is deliberately rebuilt here with check_build
          # instructions for the test step below.
          failure_msg = ('*' * 80 + '\nBuild checks failed.\n'
                         'To reproduce, run:\n'
                         'python infra/helper.py build_image {name}\n'
                         'python infra/helper.py build_fuzzers --sanitizer '
                         '{sanitizer} --engine {engine} --architecture '
                         '{architecture} {name}\n'
                         'python infra/helper.py check_build --sanitizer '
                         '{sanitizer} --engine {engine} --architecture '
                         '{architecture} {name}\n' + '*' * 80).format(
                             name=name,
                             sanitizer=sanitizer,
                             engine=fuzzing_engine,
                             architecture=architecture)

          build_steps.append(
              # test binaries
              {
                  'name':
                      'gcr.io/{0}/base-runner'.format(base_images_project),
                  'env': env,
                  'args': [
                      'bash', '-c',
                      'test_all || (echo "{0}" && false)'.format(failure_msg)
                  ],
              })

        if project_yaml['labels']:
          # write target labels
          build_steps.append({
              'name': image,
              'env': env,
              'args': [
                  '/usr/local/bin/write_labels.py',
                  json.dumps(project_yaml['labels']),
                  out,
              ],
          })

        if sanitizer == 'dataflow' and fuzzing_engine == 'dataflow':
          dataflow_steps = dataflow_post_build_steps(name, env,
                                                     base_images_project)
          if dataflow_steps:
            build_steps.extend(dataflow_steps)
          else:
            sys.stderr.write('Skipping dataflow post build steps.\n')

        build_steps.extend([
            # generate targets list
            {
                'name': 'gcr.io/{0}/base-runner'.format(base_images_project),
                'env': env,
                'args': [
                    'bash',
                    '-c',
                    'targets_list > /workspace/{0}'.format(
                        targets_list_filename),
                ],
            },
            # zip binaries
            {
                'name':
                    image,
                'args': [
                    'bash', '-c',
                    'cd {out} && zip -r {zip_file} *'.format(out=out,
                                                             zip_file=zip_file)
                ],
            },
            # upload srcmap
            {
                'name': 'gcr.io/{0}/uploader'.format(base_images_project),
                'args': [
                    '/workspace/srcmap.json',
                    srcmap_url,
                ],
            },
            # upload binaries
            {
                'name': 'gcr.io/{0}/uploader'.format(base_images_project),
                'args': [
                    os.path.join(out, zip_file),
                    upload_url,
                ],
            },
            # upload targets list
            {
                'name': 'gcr.io/{0}/uploader'.format(base_images_project),
                'args': [
                    '/workspace/{0}'.format(targets_list_filename),
                    targets_list_url,
                ],
            },
            # upload the latest.version file (content is the zip file name)
            build_lib.http_upload_step(zip_file, latest_version_url,
                                       LATEST_VERSION_CONTENT_TYPE),
            # cleanup
            {
                'name': image,
                'args': [
                    'bash',
                    '-c',
                    'rm -r ' + out,
                ],
            },
        ])

  return build_steps
def get_build_steps(project_name, project_yaml_file, dockerfile_lines,
                    image_project, base_images_project):
  """Returns the Cloud Build steps that build a coverage report for
  |project_name| and upload reports, stats, logs and srcmap to the coverage
  bucket, then publish the latest-report info file for ClusterFuzz.

  NOTE(review): skip_build appears to abort the build (it is used for control
  flow with no return afterwards) — confirm against its definition.
  """
  project_yaml = build_project.load_project_yaml(project_name,
                                                 project_yaml_file,
                                                 image_project)
  if project_yaml['disabled']:
    skip_build('Project "%s" is disabled.' % project_name)

  if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
    skip_build(('Project "{project_name}" is written in "{language}", '
                'coverage is not supported yet.').format(
                    project_name=project_name,
                    language=project_yaml['language']))

  name = project_yaml['name']
  image = project_yaml['image']
  language = project_yaml['language']
  report_date = datetime.datetime.now().strftime('%Y%m%d')

  build_steps = build_lib.project_image_steps(name, image, language)

  env = CONFIGURATION[:]
  out = '/workspace/out/' + SANITIZER
  env.append('OUT=' + out)
  env.append('FUZZING_LANGUAGE=' + language)

  workdir = build_project.workdir_from_dockerfile(dockerfile_lines)
  if not workdir:
    workdir = '/src'

  failure_msg = ('*' * 80 + '\nCoverage build failed.\nTo reproduce, run:\n'
                 'python infra/helper.py build_image {name}\n'
                 'python infra/helper.py build_fuzzers --sanitizer coverage '
                 '{name}\n' + '*' * 80).format(name=name)

  # Compilation step.
  build_steps.append({
      'name': image,
      'env': env,
      'args': [
          'bash',
          '-c',
          # Remove /out to make sure there are non instrumented binaries.
          # `cd /src && cd {workdir}` (where {workdir} is parsed from the
          # Dockerfile). Container Builder overrides our workdir so we need
          # to add this step to set it back.
          ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
           'compile || (echo "{failure_msg}" && false)').format(
               workdir=workdir, out=out, failure_msg=failure_msg),
      ],
  })

  download_corpora_steps = build_lib.download_corpora_steps(project_name)
  if not download_corpora_steps:
    skip_build("Skipping code coverage build for %s.\n" % project_name)

  build_steps.extend(download_corpora_steps)

  # failure_msg is rebuilt with coverage-specific reproduction instructions.
  failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
                 'To reproduce, run:\n'
                 'python infra/helper.py build_image {name}\n'
                 'python infra/helper.py build_fuzzers --sanitizer coverage '
                 '{name}\n'
                 'python infra/helper.py coverage {name}\n' +
                 '*' * 80).format(name=name)

  # Unpack the corpus and run coverage script.
  coverage_env = env + [
      'HTTP_PORT=',
      'COVERAGE_EXTRA_ARGS=%s' % project_yaml['coverage_extra_args'].strip(),
  ]
  if 'dataflow' in project_yaml['fuzzing_engines']:
    coverage_env.append('FULL_SUMMARY_PER_TARGET=1')

  build_steps.append({
      'name': 'gcr.io/{0}/base-runner'.format(base_images_project),
      'env': coverage_env,
      'args': [
          'bash', '-c',
          ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
           'echo "Failed to unpack the corpus for $(basename ${f%%.*}). '
           'This usually means that corpus backup for a particular fuzz '
           'target does not exist. If a fuzz target was added in the last '
           '24 hours, please wait one more day. Otherwise, something is '
           'wrong with the fuzz target or the infrastructure, and corpus '
           'pruning task does not finish successfully." && exit 1'
           '); done && coverage || (echo "' + failure_msg + '" && false)')
      ],
      'volumes': [{
          'name': 'corpus',
          'path': '/corpus'
      }],
  })

  # Upload the report.
  upload_report_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                               type='reports',
                                               date=report_date)
  # Delete the existing report as gsutil cannot overwrite it in a sane way due
  # to the lack of `-T` option (it creates a subdir in the destination dir).
  build_steps.append(build_lib.gsutil_rm_rf_step(upload_report_url))
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(out, 'report'),
          upload_report_url,
      ],
  })

  # Upload the fuzzer stats. Delete the old ones just in case.
  upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                                     type='fuzzer_stats',
                                                     date=report_date)
  build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_stats_url))
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(out, 'fuzzer_stats'),
          upload_fuzzer_stats_url,
      ],
  })

  # Upload the fuzzer logs. Delete the old ones just in case.
  upload_fuzzer_logs_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                                    type='logs',
                                                    date=report_date)
  build_steps.append(build_lib.gsutil_rm_rf_step(upload_fuzzer_logs_url))
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(out, 'logs'),
          upload_fuzzer_logs_url,
      ],
  })

  # Upload srcmap. The destination is a single .json object, not a directory.
  srcmap_upload_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                               type='srcmap',
                                               date=report_date)
  srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          'cp',
          '/workspace/srcmap.json',
          srcmap_upload_url,
      ],
  })

  # Update the latest report information file for ClusterFuzz.
  latest_report_info_url = build_lib.get_signed_url(
      LATEST_REPORT_INFO_URL.format(project=project_name),
      content_type=LATEST_REPORT_INFO_CONTENT_TYPE)
  latest_report_info_body = json.dumps({
      'fuzzer_stats_dir':
          upload_fuzzer_stats_url,
      'html_report_url':
          HTML_REPORT_URL_FORMAT.format(project=project_name,
                                        date=report_date,
                                        platform=PLATFORM),
      'report_date':
          report_date,
      'report_summary_path':
          os.path.join(upload_report_url, PLATFORM, 'summary.json'),
  })
  build_steps.append(
      build_lib.http_upload_step(latest_report_info_body,
                                 latest_report_info_url,
                                 LATEST_REPORT_INFO_CONTENT_TYPE))

  return build_steps
def get_build_steps(project_dir):
  """Returns the Cloud Build steps that build a coverage report for the
  project in |project_dir| and upload reports, stats, logs and srcmap, then
  publish the latest-report info file for ClusterFuzz.

  Args:
    project_dir: path to the project directory (its basename is the project
        name; it must contain project.yaml and a Dockerfile).

  NOTE(review): skip_build appears to abort the build (it is used for control
  flow with no return afterwards) — confirm against its definition.
  """
  project_name = os.path.basename(project_dir)
  project_yaml = build_project.load_project_yaml(project_dir)
  if project_yaml['disabled']:
    skip_build('Project "%s" is disabled.' % project_name)

  # BUGFIX: this language check used to be nested inside
  # `if os.path.exists(build.sh): with open(build.sh) as fh:` — the file
  # handle was never used and, worse, projects without a build.sh silently
  # skipped the check. Run it unconditionally, matching the other versions of
  # this function.
  if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
    skip_build(('Project "{project_name}" is written in "{language}", '
                'coverage is not supported yet.').format(
                    project_name=project_name,
                    language=project_yaml['language']))

  dockerfile_path = os.path.join(project_dir, 'Dockerfile')
  name = project_yaml['name']
  image = project_yaml['image']
  report_date = datetime.datetime.now().strftime('%Y%m%d')

  # Clone oss-fuzz, build the project image, and record the srcmap.
  build_steps = [
      {
          'args': [
              'clone',
              'https://github.com/google/oss-fuzz.git',
          ],
          'name': 'gcr.io/cloud-builders/git',
      },
      {
          'name': 'gcr.io/cloud-builders/docker',
          'args': [
              'build',
              '-t',
              image,
              '.',
          ],
          'dir': 'oss-fuzz/projects/' + name,
      },
      {
          'name':
              image,
          'args': [
              'bash', '-c',
              'srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json'
          ],
          'env': ['OSSFUZZ_REVISION=$REVISION_ID'],
      },
  ]

  env = CONFIGURATION[:]
  out = '/workspace/out/' + SANITIZER
  env.append('OUT=' + out)

  workdir = build_project.workdir_from_dockerfile(dockerfile_path)
  if not workdir:
    workdir = '/src'

  failure_msg = ('*' * 80 + '\nCoverage build failed.\nTo reproduce, run:\n'
                 'python infra/helper.py build_image {name}\n'
                 'python infra/helper.py build_fuzzers --sanitizer coverage '
                 '{name}\n' + '*' * 80).format(name=name)

  # Compilation step.
  build_steps.append({
      'name': image,
      'env': env,
      'args': [
          'bash',
          '-c',
          # Remove /out to make sure there are non instrumented binaries.
          # `cd /src && cd {workdir}` (where {workdir} is parsed from the
          # Dockerfile). Container Builder overrides our workdir so we need
          # to add this step to set it back.
          ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
           'compile || (echo "{failure_msg}" && false)').format(
               workdir=workdir, out=out, failure_msg=failure_msg),
      ],
  })

  download_corpora_step = build_lib.download_corpora_step(project_name)
  if not download_corpora_step:
    skip_build("Skipping code coverage build for %s.\n" % project_name)

  build_steps.append(download_corpora_step)

  # Rebuilt with coverage-specific reproduction instructions.
  failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
                 'To reproduce, run:\n'
                 'python infra/helper.py build_image {name}\n'
                 'python infra/helper.py build_fuzzers --sanitizer coverage '
                 '{name}\n'
                 'python infra/helper.py coverage {name}\n' +
                 '*' * 80).format(name=name)

  # Unpack the corpus and run coverage script.
  build_steps.append({
      'name':
          'gcr.io/oss-fuzz-base/base-runner',
      'env':
          env + [
              'HTTP_PORT=',
              'COVERAGE_EXTRA_ARGS=%s' %
              project_yaml['coverage_extra_args'].strip()
          ],
      'args': [
          'bash', '-c',
          ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
           'echo "Failed to unpack the corpus for $(basename ${f%%.*}). '
           'This usually means that corpus backup for a particular fuzz '
           'target does not exist. If a fuzz target was added in the last '
           '24 hours, please wait one more day. Otherwise, something is '
           'wrong with the fuzz target or the infrastructure, and corpus '
           'pruning task does not finish successfully." && exit 1'
           '); done && coverage || (echo "' + failure_msg + '" && false)')
      ],
      'volumes': [{
          'name': 'corpus',
          'path': '/corpus'
      }],
  })

  # Upload the report.
  upload_report_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                               type='reports',
                                               date=report_date)
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(out, 'report'),
          upload_report_url,
      ],
  })

  # Upload the fuzzer stats.
  upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                                     type='fuzzer_stats',
                                                     date=report_date)
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(out, 'fuzzer_stats'),
          upload_fuzzer_stats_url,
      ],
  })

  # Upload the fuzzer logs.
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(out, 'logs'),
          UPLOAD_URL_FORMAT.format(project=project_name,
                                   type='logs',
                                   date=report_date),
      ],
  })

  # Upload srcmap. The destination is a single .json object, not a directory.
  srcmap_upload_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                               type='srcmap',
                                               date=report_date)
  srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          'cp',
          '/workspace/srcmap.json',
          srcmap_upload_url,
      ],
  })

  # Update the latest report information file for ClusterFuzz.
  latest_report_info_url = build_lib.get_signed_url(
      LATEST_REPORT_INFO_URL.format(project=project_name),
      method='PUT',
      content_type='application/json')
  latest_report_info_body = json.dumps({
      'fuzzer_stats_dir':
          upload_fuzzer_stats_url,
      'html_report_url':
          HTML_REPORT_URL_FORMAT.format(project=project_name,
                                        date=report_date,
                                        platform=PLATFORM),
      'report_date':
          report_date,
      'report_summary_path':
          os.path.join(upload_report_url, PLATFORM, 'summary.json'),
  })
  build_steps.append({
      'name': 'gcr.io/cloud-builders/curl',
      'args': [
          '-H',
          'Content-Type: application/json',
          '-X',
          'PUT',
          '-d',
          latest_report_info_body,
          latest_report_info_url,
      ],
  })

  return build_steps