def checkout(self, source_repo):
  """Check out a source repo.

  Clones or updates the repository for `source_repo` under the worker's
  sources directory and returns the resulting repo handle.
  """
  # Each source repo gets its own subdirectory, keyed by its name.
  checkout_dir = os.path.join(self._sources_dir, source_repo.name)
  callbacks = self._git_callbacks(source_repo)
  return osv.ensure_updated_checkout(
      source_repo.repo_url,
      checkout_dir,
      git_callbacks=callbacks,
      branch=source_repo.repo_branch)
def process_task(self, subscriber, subscription, ack_id, message):
  """Process a task, enforcing MAX_LEASE_DURATION via a worker thread.

  Bug fix: the signature previously read `process_task(ack_id, message)`,
  but the body references `self`, `subscriber` and `subscription` (for the
  thread args and the `handle_timeout` call), so every invocation raised
  NameError. They are restored as parameters here.

  Args:
    subscriber: Pub/Sub subscriber client, forwarded to the task thread.
    subscription: subscription path the message was pulled from.
    ack_id: ack ID used to ack/extend the message lease.
    message: the Pub/Sub message describing the task.
  """
  # Refresh the OSS-Fuzz checkout and clear leftover build artifacts before
  # doing any work on this task.
  osv.ensure_updated_checkout(OSS_FUZZ_GIT_URL, self._oss_fuzz_dir)
  clean_artifacts(self._oss_fuzz_dir)

  # Enforce timeout by doing the work in another thread. The daemon flag
  # ensures a hung task thread cannot keep the process alive.
  done_event = threading.Event()
  thread = threading.Thread(
      target=self._do_process_task,
      args=(subscriber, subscription, ack_id, message, done_event),
      daemon=True)
  thread.start()

  done = done_event.wait(timeout=MAX_LEASE_DURATION)
  logging.info('Returned from task thread')
  if not done:
    # The task thread did not signal completion within the lease window.
    self.handle_timeout(subscriber, subscription, ack_id, message)
    logging.error('Timed out processing task')
def main():
  """Worker entry point: configure logging, parse flags, and run tasks."""
  root = logging.getLogger()
  root.addFilter(LogFilter())
  root.addHandler(GkeLogHandler())
  root.setLevel(logging.INFO)
  # Silence chatty Pub/Sub internals.
  for noisy in ('google.api_core.bidi',
                'google.cloud.pubsub_v1.subscriber._protocol.'
                'streaming_pull_manager'):
    logging.getLogger(noisy).setLevel(logging.ERROR)

  parser = argparse.ArgumentParser(description='Worker')
  parser.add_argument(
      '--work_dir', help='Working directory', default=DEFAULT_WORK_DIR)
  parser.add_argument('--ssh_key_public', help='Public SSH key path')
  parser.add_argument('--ssh_key_private', help='Private SSH key path')
  args = parser.parse_args()

  # Work around kernel bug: https://gvisor.dev/issue/1765
  unlimited = (resource.RLIM_INFINITY, resource.RLIM_INFINITY)
  resource.setrlimit(resource.RLIMIT_MEMLOCK, unlimited)

  subprocess.call(('service', 'docker', 'start'))

  oss_fuzz_dir = os.path.join(args.work_dir, 'oss-fuzz')
  tmp_dir = os.path.join(args.work_dir, 'tmp')
  os.makedirs(tmp_dir, exist_ok=True)
  os.environ['TMPDIR'] = tmp_dir

  # Add oss-fuzz/infra to the import path so we can import from it.
  sys.path.append(os.path.join(oss_fuzz_dir, 'infra'))

  osv.ensure_updated_checkout(OSS_FUZZ_GIT_URL, oss_fuzz_dir)

  ndb_client = ndb.Client()
  with ndb_client.context():
    runner = TaskRunner(ndb_client, oss_fuzz_dir, args.work_dir,
                        args.ssh_key_public, args.ssh_key_private)
    runner.loop()
def _source_update(self, message):
  """Source update.

  Syncs the source repo named in `message`, validates the referenced file
  against its expected sha256, and applies the parsed vulnerability.
  """
  attrs = message.attributes
  source = attrs['source']
  path = attrs['path']
  original_sha256 = attrs['original_sha256']
  deleted = attrs['deleted'] == 'true'

  source_repo = osv.get_source_repository(source)
  repo = osv.ensure_updated_checkout(
      source_repo.repo_url,
      os.path.join(self._sources_dir, source),
      git_callbacks=self._git_callbacks(source_repo))

  vuln_file = os.path.join(osv.repo_path(repo), path)
  if not os.path.exists(vuln_file):
    logging.info('%s was deleted.', vuln_file)
    if deleted:
      self._handle_deleted(vuln_file)
    return

  if deleted:
    # A deletion was requested but the file is still present upstream.
    logging.info('Deletion request but source still exists, aborting.')
    return

  # Bail out if the file changed since this message was published.
  actual_sha256 = osv.sha256(vuln_file)
  if actual_sha256 != original_sha256:
    logging.warning(
        'sha256sum of %s no longer matches (expected=%s vs current=%s).',
        path, original_sha256, actual_sha256)
    return

  try:
    vulnerability = osv.parse_vulnerability(vuln_file)
  except Exception as e:
    logging.error('Failed to parse vulnerability %s: %s', vuln_file, e)
    return

  self._do_update(source_repo, repo, vulnerability, vuln_file, path,
                  original_sha256)
def _source_update(self, message):
  """Source update.

  Handles an update notification for a vulnerability source. Depending on
  the source repository's type, the record is read either from a git
  checkout (GIT) or directly from a GCS bucket object (BUCKET), verified
  against the sha256 recorded in the message, and each parsed vulnerability
  is passed to self._do_update().

  Args:
    message: Pub/Sub-style message; reads attributes 'source', 'path',
      'original_sha256' and 'deleted' ('true'/'false' string).
  """
  source = message.attributes['source']
  path = message.attributes['path']
  original_sha256 = message.attributes['original_sha256']
  deleted = message.attributes['deleted'] == 'true'

  source_repo = osv.get_source_repository(source)
  if source_repo.type == osv.SourceRepositoryType.GIT:
    # Sync the configured branch of the source repo before reading.
    repo = osv.ensure_updated_checkout(
        source_repo.repo_url,
        os.path.join(self._sources_dir, source),
        git_callbacks=self._git_callbacks(source_repo),
        branch=source_repo.repo_branch)
    vuln_path = os.path.join(osv.repo_path(repo), path)
    if not os.path.exists(vuln_path):
      logging.info('%s was deleted.', vuln_path)
      if deleted:
        self._handle_deleted(source_repo, path)
      return
    if deleted:
      # Deletion requested, but the file still exists in the checkout:
      # the request is stale, so do nothing.
      logging.info('Deletion request but source still exists, aborting.')
      return
    try:
      # key_path selects where vulnerabilities live inside the file
      # (see osv.parse_vulnerabilities) — presumably for wrapped formats.
      vulnerabilities = osv.parse_vulnerabilities(
          vuln_path, key_path=source_repo.key_path)
    except Exception as e:
      logging.error('Failed to parse vulnerability %s: %s', vuln_path, e)
      return
    current_sha256 = osv.sha256(vuln_path)
  elif source_repo.type == osv.SourceRepositoryType.BUCKET:
    # Bucket-backed sources are fetched directly; no local checkout.
    storage_client = storage.Client()
    bucket = storage_client.bucket(source_repo.bucket)
    try:
      blob = bucket.blob(path).download_as_bytes()
    except google.cloud.exceptions.NotFound:
      logging.error('Bucket path %s does not exist.', path)
      return
    current_sha256 = osv.sha256_bytes(blob)
    try:
      # The file extension (e.g. '.json'/'.yaml') tells the parser which
      # format to expect.
      vulnerabilities = osv.parse_vulnerabilities_from_data(
          blob,
          extension=os.path.splitext(path)[1],
          key_path=source_repo.key_path)
    except Exception as e:
      logging.error('Failed to parse vulnerability %s: %s', path, e)
      return
    # No git repo for bucket sources; _do_update must tolerate repo=None.
    repo = None
  else:
    raise RuntimeError('Unsupported SourceRepository type.')

  # Skip the update if the content changed since the message was published;
  # a newer message will carry the new hash.
  if current_sha256 != original_sha256:
    logging.warning(
        'sha256sum of %s no longer matches (expected=%s vs current=%s).',
        path, original_sha256, current_sha256)
    return

  # A single file may contain several vulnerability records.
  for vulnerability in vulnerabilities:
    self._do_update(source_repo, repo, vulnerability, path, original_sha256)
def setUp(self):
  """Create a temp working dir and check out a fresh OSS-Fuzz copy."""
  self.tmp_dir = tempfile.mkdtemp()
  checkout_path = os.path.join(self.tmp_dir, 'oss-fuzz')
  self.oss_fuzz_checkout = checkout_path
  osv.ensure_updated_checkout(worker.OSS_FUZZ_GIT_URL, checkout_path)