Example 1
def get_experiment_tag_for_image(image_specs, tag_by_experiment=True):
    """Returns the registry with the experiment tag for given image."""
    image_tag = posixpath.join(experiment_utils.get_base_docker_tag(),
                               image_specs['tag'])
    if not tag_by_experiment:
        return image_tag
    # Suffix with the current experiment name, e.g. "repo/tag:experiment".
    return image_tag + ':' + experiment_utils.get_experiment_name()
Example 2
def get_builder_image_url(benchmark, fuzzer, cloud_project):
    """Get the URL of the docker builder image for fuzzing the benchmark with
    fuzzer."""
    base_tag = experiment_utils.get_base_docker_tag(cloud_project)
    # OSS-Fuzz benchmarks live under a dedicated "oss-fuzz" path keyed by
    # the upstream project name rather than the benchmark name.
    if is_oss_fuzz(benchmark):
        project = get_project(benchmark)
        return '%s/oss-fuzz/builders/%s/%s' % (base_tag, fuzzer, project)
    return '%s/builders/%s/%s' % (base_tag, fuzzer, benchmark)
Example 3
    def start(self):
        """Start the experiment on the dispatcher.

        Waits for the dispatcher instance to come up, verifies SSH access,
        then launches the dispatcher container on it via SSH. Returns the
        result of the gcloud ssh call.
        """
        # TODO(metzman): Replace this workflow with a startup script so we don't
        # need to SSH into the dispatcher.
        self.process.join()  # Wait for dispatcher instance.
        # Check that we can SSH into the instance.
        gcloud.robust_begin_gcloud_ssh(self.instance_name,
                                       self.config['cloud_compute_zone'])

        base_docker_tag = experiment_utils.get_base_docker_tag(
            self.config['cloud_project'])
        cloud_sql_instance_connection_name = (
            self.config['cloud_sql_instance_connection_name'])

        command = (
            'echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope && '
            'docker run --rm '
            '-e INSTANCE_NAME="{instance_name}" '
            '-e EXPERIMENT="{experiment}" '
            '-e CLOUD_PROJECT="{cloud_project}" '
            '-e CLOUD_EXPERIMENT_BUCKET="{cloud_experiment_bucket}" '
            # BUG FIX: this previously hard-coded a redacted "******"
            # literal, so the password read from os.environ below (passed
            # as the otherwise-unused |postgres_password| format kwarg)
            # never reached the container.
            '-e POSTGRES_PASSWORD="{postgres_password}" '
            '-e CLOUD_SQL_INSTANCE_CONNECTION_NAME='
            '"{cloud_sql_instance_connection_name}" '
            '--cap-add=SYS_PTRACE --cap-add=SYS_NICE '
            '-v /var/run/docker.sock:/var/run/docker.sock '
            '--name=dispatcher-container '
            '{base_docker_tag}/dispatcher-image '
            '/work/startup-dispatcher.sh'
        ).format(
            instance_name=self.instance_name,
            postgres_password=os.environ['POSTGRES_PASSWORD'],
            experiment=self.config['experiment'],
            # TODO(metzman): Create a function that sets env vars based on
            # the contents of a dictionary, and use it instead of hardcoding
            # the configs we use.
            cloud_project=self.config['cloud_project'],
            cloud_experiment_bucket=self.config['cloud_experiment_bucket'],
            cloud_sql_instance_connection_name=(
                cloud_sql_instance_connection_name),
            base_docker_tag=base_docker_tag,
        )
        return gcloud.ssh(self.instance_name,
                          command=command,
                          zone=self.config['cloud_compute_zone'])
Example 4
def _build(
        config_file: str,
        config_name: str,
        substitutions: Dict[str, str] = None,
        timeout_seconds: int = GCB_BUILD_TIMEOUT) -> new_process.ProcessResult:
    """Build each of |args| on gcb.

    Args:
        config_file: Path to the GCB build config file.
        config_name: Name used when storing the build logs.
        substitutions: Optional GCB substitutions; must not contain the
            reserved keys '_REPO' or '_EXPERIMENT'. The caller's dict is
            not modified.
        timeout_seconds: Build timeout, also passed to the subprocess.

    Returns:
        The new_process.ProcessResult of the gcloud invocation.
    """
    config_arg = '--config=%s' % config_file
    machine_type_arg = '--machine-type=%s' % GCB_MACHINE_TYPE

    # Use "s" suffix to denote seconds.
    timeout_arg = '--timeout=%ds' % timeout_seconds

    command = [
        'gcloud',
        'builds',
        'submit',
        str(utils.ROOT_DIR),
        config_arg,
        timeout_arg,
        machine_type_arg,
    ]

    if substitutions is None:
        substitutions = {}
    else:
        # BUG FIX: copy so we don't mutate the caller's dict when adding
        # the reserved keys below.
        substitutions = substitutions.copy()

    assert '_REPO' not in substitutions
    substitutions['_REPO'] = experiment_utils.get_base_docker_tag()

    assert '_EXPERIMENT' not in substitutions
    substitutions['_EXPERIMENT'] = experiment_utils.get_experiment_name()

    # Render as the comma-separated KEY=VALUE list gcloud expects.
    substitutions_arg = ','.join(
        '%s=%s' % (key, value) for key, value in substitutions.items())
    command.append('--substitutions=%s' % substitutions_arg)

    # Don't write to stdout to make concurrent building faster. Otherwise
    # writing becomes the bottleneck.
    result = new_process.execute(command,
                                 write_to_stdout=False,
                                 kill_children=True,
                                 timeout=timeout_seconds)
    build_utils.store_build_logs(config_name, result)
    return result
Example 5
def coverage_steps(benchmark):
    """Returns GCB run steps for coverage builds."""
    coverage_binaries_dir = exp_path.filestore(
        build_utils.get_coverage_binaries_dir())

    # Coverage builder image, tagged with the current experiment name.
    builder_image = posixpath.join(experiment_utils.get_base_docker_tag(),
                                   'builders', 'coverage', benchmark)
    tagged_builder_image = (builder_image + ':' +
                            experiment_utils.get_experiment_name())

    archive_name = 'coverage-build-' + benchmark + '.tar.gz'
    tar_command = ('cd /out; tar -czvf /host-out/' + archive_name +
                   ' * /src /work')

    # Step 1: run the builder image and tar up the coverage build.
    steps = [{
        'name': DOCKER_IMAGE,
        'args': [
            'run', '-v', '/workspace/out:/host-out', tagged_builder_image,
            '/bin/bash', '-c', tar_command
        ]
    }]
    # Step 2: upload the archive to the coverage binaries filestore.
    steps.append({
        'name': 'gcr.io/cloud-builders/gsutil',
        'args': [
            '-m', 'cp', '/workspace/out/' + archive_name,
            coverage_binaries_dir + '/'
        ]
    })
    return steps
Example 6
    def start(self):
        """Start the experiment on the dispatcher.

        Runs the dispatcher image locally in docker, sharing the host's
        gcloud config and docker socket, and returns the result of the
        docker run invocation.
        """
        shared_volume_dir = os.path.abspath('shared-volume')
        if not os.path.exists(shared_volume_dir):
            os.mkdir(shared_volume_dir)

        home = os.environ['HOME']
        local_db_path = os.path.join(shared_volume_dir, 'local.db')

        base_docker_tag = experiment_utils.get_base_docker_tag(
            self.config['cloud_project'])
        dispatcher_image_url = base_docker_tag + '/dispatcher-image'

        # Host paths mounted into the container.
        volumes = [
            home + '/.config/gcloud:/root/.config/gcloud',
            '/var/run/docker.sock:/var/run/docker.sock',
            shared_volume_dir + ':' + shared_volume_dir,
        ]
        # Environment passed to the dispatcher.
        env_vars = [
            'SHARED_VOLUME=' + shared_volume_dir,
            'HOST_GCLOUD_CONFIG=' + home + '/.config/gcloud',
            'INSTANCE_NAME=' + self.instance_name,
            'EXPERIMENT=' + self.config['experiment'],
            'CLOUD_PROJECT=' + self.config['cloud_project'],
            'SQL_DATABASE_URL=sqlite:///' + local_db_path,
            'CLOUD_EXPERIMENT_BUCKET=' + self.config['cloud_experiment_bucket'],
            'LOCAL_EXPERIMENT=True',
        ]

        command = ['docker', 'run', '-ti', '--rm']
        for volume in volumes:
            command += ['-v', volume]
        for env_var in env_vars:
            command += ['-e', env_var]
        command += [
            '--cap-add=SYS_PTRACE',
            '--cap-add=SYS_NICE',
            '--name=dispatcher-container',
            dispatcher_image_url,
            '/bin/bash',
            '-c',
            # Sync experiment inputs, then run the dispatcher; fall back to
            # a shell if the experiment fails so it can be debugged.
            ('gsutil -m rsync -r '
             '"${CLOUD_EXPERIMENT_BUCKET}/${EXPERIMENT}/input" ${WORK} && '
             'source "/work/.venv/bin/activate" && '
             'pip3 install -r "/work/src/requirements.txt" && '
             'PYTHONPATH=/work/src python3 '
             '/work/src/experiment/dispatcher.py || '
             '/bin/bash'),
        ]
        return new_process.execute(command, write_to_stdout=True)
Example 7
def get_runner_image_url(benchmark, fuzzer, cloud_project):
    """Get the URL of the docker runner image for fuzzing the benchmark with
    fuzzer."""
    base_tag = experiment_utils.get_base_docker_tag(cloud_project)
    return '%s/runners/%s/%s' % (base_tag, fuzzer, benchmark)
Example 8
    def start(self):
        """Start the experiment on the dispatcher.

        Runs the dispatcher image locally in docker with the experiment and
        report filestores bind-mounted, and returns the result of the
        docker run invocation.
        """
        shared_volume_dir = os.path.abspath('shared-volume')
        if not os.path.exists(shared_volume_dir):
            os.mkdir(shared_volume_dir)

        experiment_filestore = self.config['experiment_filestore']
        report_filestore = self.config['report_filestore']
        local_db_path = os.path.join(shared_volume_dir, 'local.db')

        base_docker_tag = experiment_utils.get_base_docker_tag(
            self.config['cloud_project'])
        dispatcher_image_url = base_docker_tag + '/dispatcher-image'

        # Host paths mounted into the container (filestores are mounted at
        # the same path inside the container as on the host).
        volumes = [
            '/var/run/docker.sock:/var/run/docker.sock',
            shared_volume_dir + ':' + shared_volume_dir,
            experiment_filestore + ':' + experiment_filestore,
            report_filestore + ':' + report_filestore,
        ]
        # Environment passed to the dispatcher.
        env_vars = [
            'SHARED_VOLUME=' + shared_volume_dir,
            'INSTANCE_NAME=' + self.instance_name,
            'EXPERIMENT=' + self.config['experiment'],
            'CLOUD_PROJECT=' + self.config['cloud_project'],
            'SQL_DATABASE_URL=sqlite:///' + local_db_path,
            'EXPERIMENT_FILESTORE=' + experiment_filestore,
            'REPORT_FILESTORE=' + report_filestore,
            'LOCAL_EXPERIMENT=True',
        ]

        command = ['docker', 'run', '-ti', '--rm']
        for volume in volumes:
            command += ['-v', volume]
        for env_var in env_vars:
            command += ['-e', env_var]
        command += [
            '--cap-add=SYS_PTRACE',
            '--cap-add=SYS_NICE',
            '--name=dispatcher-container',
            dispatcher_image_url,
            '/bin/bash',
            '-c',
            # Sync inputs, unpack the sources, run the dispatcher; fall
            # back to a shell if the experiment fails so it can be debugged.
            ('rsync -r '
             '"${EXPERIMENT_FILESTORE}/${EXPERIMENT}/input/" ${WORK} && '
             'mkdir ${WORK}/src && '
             'tar -xvzf ${WORK}/src.tar.gz -C ${WORK}/src && '
             'source "${WORK}/.venv/bin/activate" && '
             'pip3 install -r "${WORK}/src/requirements.txt" && '
             'PYTHONPATH=${WORK}/src python3 '
             '${WORK}/src/experiment/dispatcher.py || '
             '/bin/bash'),
        ]
        return new_process.execute(command, write_to_stdout=True)