def __init__(self, fuzzing_engine, sanitizer, architecture):
  """Record one (fuzzing engine, sanitizer, architecture) build configuration.

  The targets-list filename is derived from the sanitizer via build_lib so
  later steps can locate the per-sanitizer list.
  """
  self.architecture = architecture
  self.sanitizer = sanitizer
  self.fuzzing_engine = fuzzing_engine
  self.targets_list_filename = build_lib.get_targets_list_filename(sanitizer)
def get_build_steps(project_name, project_yaml_file, dockerfile_lines, image_project, base_images_project):
  """Returns build steps for project.

  Args:
    project_name: name of the project to build.
    project_yaml_file: the project's project.yaml; passed straight through to
      load_project_yaml (exact type — path vs. file handle — depends on that
      helper; confirm against its definition).
    dockerfile_lines: lines of the project's Dockerfile, used only to recover
      the WORKDIR for the compile step.
    image_project: GCP project hosting the project's builder image (consumed
      by load_project_yaml).
    base_images_project: GCP project hosting the shared infra images
      (msan-builder, base-runner, uploader).

  Returns:
    A list of Google Cloud Build step dicts: base image steps, then for every
    supported (fuzzing engine, sanitizer, architecture) combination a
    compile step, optional check/label/dataflow steps, and steps to zip and
    upload the binaries, srcmap and targets list.
  """
  project_yaml = load_project_yaml(project_name, project_yaml_file, image_project)
  name = project_yaml['name']
  image = project_yaml['image']
  language = project_yaml['language']
  run_tests = project_yaml['run_tests']
  # Timestamp is computed once so every configuration built in this run
  # shares the same version stamp.
  time_stamp = datetime.datetime.now().strftime('%Y%m%d%H%M')

  build_steps = build_lib.project_image_steps(name, image, language)
  # Copy over MSan instrumented libraries.
  build_steps.append({
      'name': 'gcr.io/{0}/msan-builder'.format(base_images_project),
      'args': [
          'bash',
          '-c',
          'cp -r /msan /workspace',
      ],
  })

  # Emit one group of steps per supported
  # (fuzzing engine, sanitizer, architecture) combination.
  for fuzzing_engine in project_yaml['fuzzing_engines']:
    for sanitizer in get_sanitizers(project_yaml):
      for architecture in project_yaml['architectures']:
        if not is_supported_configuration(fuzzing_engine, sanitizer, architecture):
          continue

        # Copy the engine env ([:]) so per-configuration appends below do
        # not leak into CONFIGURATIONS or other iterations.
        env = CONFIGURATIONS['engine-' + fuzzing_engine][:]
        env.extend(CONFIGURATIONS['sanitizer-' + sanitizer])
        out = '/workspace/out/' + sanitizer
        stamped_name = '-'.join([name, sanitizer, time_stamp])
        latest_version_file = '-'.join(
            [name, sanitizer, LATEST_VERSION_FILENAME])
        zip_file = stamped_name + '.zip'
        stamped_srcmap_file = stamped_name + '.srcmap.json'
        bucket = build_lib.ENGINE_INFO[fuzzing_engine].upload_bucket
        # Non-default architectures upload to an architecture-suffixed bucket.
        if architecture != 'x86_64':
          bucket += '-' + architecture

        # Pre-sign all upload URLs now; the uploader steps themselves run
        # without GCS credentials.
        upload_url = build_lib.get_signed_url(
            build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name, zip_file))
        srcmap_url = build_lib.get_signed_url(
            build_lib.GCS_UPLOAD_URL_FORMAT.format(bucket, name, stamped_srcmap_file))
        latest_version_url = build_lib.GCS_UPLOAD_URL_FORMAT.format(
            bucket, name, latest_version_file)
        latest_version_url = build_lib.get_signed_url(
            latest_version_url, content_type=LATEST_VERSION_CONTENT_TYPE)

        targets_list_filename = build_lib.get_targets_list_filename(sanitizer)
        targets_list_url = build_lib.get_signed_url(
            build_lib.get_targets_list_url(bucket, name, sanitizer))

        env.append('OUT=' + out)
        env.append('MSAN_LIBS_PATH=/workspace/msan')
        env.append('ARCHITECTURE=' + architecture)
        env.append('FUZZING_LANGUAGE=' + language)

        # Fall back to /src when the Dockerfile declares no WORKDIR.
        workdir = workdir_from_dockerfile(dockerfile_lines)
        if not workdir:
          workdir = '/src'

        failure_msg = ('*' * 80 + '\nFailed to build.\nTo reproduce, run:\n'
                       'python infra/helper.py build_image {name}\n'
                       'python infra/helper.py build_fuzzers --sanitizer '
                       '{sanitizer} --engine {engine} --architecture '
                       '{architecture} {name}\n' + '*' * 80).format(
                           name=name,
                           sanitizer=sanitizer,
                           engine=fuzzing_engine,
                           architecture=architecture)

        build_steps.append(
            # compile
            {
                'name': image,
                'env': env,
                'args': [
                    'bash',
                    '-c',
                    # Remove /out to break loudly when a build script
                    # incorrectly uses /out instead of $OUT.
                    # `cd /src && cd {workdir}` (where {workdir} is parsed from
                    # the Dockerfile). Container Builder overrides our workdir
                    # so we need to add this step to set it back.
                    ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} '
                     '&& compile || (echo "{failure_msg}" && false)'
                    ).format(workdir=workdir, out=out, failure_msg=failure_msg),
                ],
            })

        if sanitizer == 'memory':
          # Patch dynamic libraries to use instrumented ones.
          build_steps.append({
              'name': 'gcr.io/{0}/msan-builder'.format(base_images_project),
              'args': [
                  'bash',
                  '-c',
                  # TODO(ochang): Replace with just patch_build.py once
                  # permission in image is fixed.
                  'python /usr/local/bin/patch_build.py {0}'.format(out),
              ],
          })

        if run_tests:
          failure_msg = ('*' * 80 + '\nBuild checks failed.\n'
                         'To reproduce, run:\n'
                         'python infra/helper.py build_image {name}\n'
                         'python infra/helper.py build_fuzzers --sanitizer '
                         '{sanitizer} --engine {engine} --architecture '
                         '{architecture} {name}\n'
                         'python infra/helper.py check_build --sanitizer '
                         '{sanitizer} --engine {engine} --architecture '
                         '{architecture} {name}\n' + '*' * 80).format(
                             name=name,
                             sanitizer=sanitizer,
                             engine=fuzzing_engine,
                             architecture=architecture)

          build_steps.append(
              # test binaries
              {
                  'name': 'gcr.io/{0}/base-runner'.format(base_images_project),
                  'env': env,
                  'args': [
                      'bash', '-c',
                      'test_all || (echo "{0}" && false)'.format(failure_msg)
                  ],
              })

        if project_yaml['labels']:
          # write target labels
          build_steps.append({
              'name': image,
              'env': env,
              'args': [
                  '/usr/local/bin/write_labels.py',
                  json.dumps(project_yaml['labels']),
                  out,
              ],
          })

        if sanitizer == 'dataflow' and fuzzing_engine == 'dataflow':
          dataflow_steps = dataflow_post_build_steps(name, env, base_images_project)
          if dataflow_steps:
            build_steps.extend(dataflow_steps)
          else:
            # Best-effort: missing dataflow steps is logged, not fatal.
            sys.stderr.write('Skipping dataflow post build steps.\n')

        build_steps.extend([
            # generate targets list
            {
                'name': 'gcr.io/{0}/base-runner'.format(base_images_project),
                'env': env,
                'args': [
                    'bash',
                    '-c',
                    'targets_list > /workspace/{0}'.format(targets_list_filename),
                ],
            },
            # zip binaries
            {
                'name': image,
                'args': [
                    'bash', '-c',
                    'cd {out} && zip -r {zip_file} *'.format(out=out, zip_file=zip_file)
                ],
            },
            # upload srcmap
            {
                'name': 'gcr.io/{0}/uploader'.format(base_images_project),
                'args': [
                    '/workspace/srcmap.json',
                    srcmap_url,
                ],
            },
            # upload binaries
            {
                'name': 'gcr.io/{0}/uploader'.format(base_images_project),
                'args': [
                    os.path.join(out, zip_file),
                    upload_url,
                ],
            },
            # upload targets list
            {
                'name': 'gcr.io/{0}/uploader'.format(base_images_project),
                'args': [
                    '/workspace/{0}'.format(targets_list_filename),
                    targets_list_url,
                ],
            },
            # upload the latest.version file
            # NOTE(review): the file's *content* is the zip filename, so
            # readers of latest.version learn which archive is current.
            build_lib.http_upload_step(zip_file, latest_version_url,
                                       LATEST_VERSION_CONTENT_TYPE),
            # cleanup
            {
                'name': image,
                'args': [
                    'bash',
                    '-c',
                    'rm -r ' + out,
                ],
            },
        ])

  return build_steps