Example #1
0
 def is_corpus_dir_same(self):
     """Refreshes |self.corpus_dir_contents| to the current directory
     contents and returns True if nothing changed since the last refresh."""
     logs.debug('Checking if corpus dir is the same.')
     snapshot_before = self.corpus_dir_contents.copy()
     self._set_corpus_dir_contents()
     return self.corpus_dir_contents == snapshot_before
Example #2
0
def main() -> int:
    """Check that this branch conforms to the standards of fuzzbench."""
    # A list of (command, check function) tuples rather than a dict so the
    # default run executes the checks in exactly this order.
    checks_in_order = [
        ('licensecheck', license_check),
        ('format', yapf),
        ('lint', lint),
        ('typecheck', pytype),
        ('test', pytest),
        ('validate_fuzzers_and_benchmarks', validate_fuzzers_and_benchmarks),
        ('validate_experiment_requests', validate_experiment_requests),
        ('test_changed_integrations', test_changed_integrations),
    ]

    args = get_args(checks_in_order)

    # Every check assumes the working directory is the repository root.
    os.chdir(_SRC_ROOT)
    initialize_logs(args.verbose)

    relevant_files = get_relevant_files(args.all_files)
    logs.debug('Running presubmit check(s) on: %s',
               ' '.join(str(path) for path in relevant_files))

    if args.command:
        success = do_single_check(args.command, relevant_files,
                                  checks_in_order)
    else:
        # No specific command requested: run the default set of checks.
        success = do_default_checks(relevant_files, checks_in_order)
    return bool_to_returncode(success)
Example #3
0
def main() -> int:
    """Check that this branch conforms to the standards of fuzzbench.

    Returns:
        0 when all requested checks pass, nonzero otherwise (suitable for
        sys.exit).
    """
    parser = argparse.ArgumentParser(
        description='Presubmit script for fuzzbench.')
    choices = [
        'format', 'lint', 'typecheck', 'licensecheck',
        'test_changed_integrations'
    ]
    parser.add_argument(
        'command',
        choices=choices,
        nargs='?',
        help='The presubmit check to run. Defaults to all of them')
    parser.add_argument('--all-files',
                        action='store_true',
                        help='Run presubmit check(s) on all files',
                        default=False)
    parser.add_argument('-v', '--verbose', action='store_true', default=False)

    args = parser.parse_args()

    # Every check assumes the working directory is the repository root.
    os.chdir(_SRC_ROOT)

    if not args.verbose:
        logs.initialize()
    else:
        logs.initialize(log_level=logging.DEBUG)

    if not args.all_files:
        relevant_files = [
            Path(path) for path in diff_utils.get_changed_files()
        ]
    else:
        relevant_files = get_all_files()

    relevant_files = filter_ignored_files(relevant_files)

    logs.debug('Running presubmit check(s) on: %s',
               ' '.join(str(path) for path in relevant_files))

    if not args.command:
        # No specific command requested: run all checks.
        success = do_checks(relevant_files)
        return bool_to_returncode(success)

    # NOTE: keep this mapping in sync with |choices| above — every CLI
    # choice needs an entry here or the lookup below raises KeyError.
    command_check_mapping = {
        'format': yapf,
        'lint': lint,
        'typecheck': pytype,
        # Bug fix: 'licensecheck' was accepted by argparse but missing from
        # this mapping, so requesting it crashed with a KeyError.
        'licensecheck': license_check,
        'test_changed_integrations': test_changed_integrations
    }

    check = command_check_mapping[args.command]
    if args.command == 'format':
        # yapf takes an extra flag; False presumably means "check only,
        # don't rewrite files" — confirm against the yapf wrapper.
        success = check(relevant_files, False)
    else:
        success = check(relevant_files)
    if not success:
        print('ERROR: %s failed, see errors above.' % check.__name__)
    return bool_to_returncode(success)
Example #4
0
def execute(  # pylint: disable=too-many-locals,too-many-branches
        command: List[str],
        *args,
        expect_zero: bool = True,
        timeout: int = None,
        write_to_stdout=False,
        # If not set, will default to PIPE.
        output_file=None,
        # Not True by default because we can't always set group on processes.
        kill_children: bool = False,
        **kwargs) -> ProcessResult:
    """Execute |command| and return the returncode and the output.

    Args:
        command: The command to run, as an argv-style list (no shell).
        expect_zero: If True, a nonzero return code raises
            CalledProcessError — unless the process was killed by our own
            timeout handling.
        timeout: Seconds before a watchdog thread kills the process, or
            None for no timeout.
        write_to_stdout: If True, the child inherits our stdout (output is
            not captured); must not be combined with |output_file|.
        output_file: File object to redirect the child's output into.
            Defaults to capturing via a pipe.
        kill_children: If True, the child runs in its own session so its
            descendants can be killed as a group.

    Returns:
        A ProcessResult with the return code, the decoded output (None when
        output was not captured), and whether the process timed out.
    """
    if write_to_stdout:
        # Don't set stdout, it's default value None, causes it to be set to
        # stdout.
        assert output_file is None
    elif not output_file:
        output_file = subprocess.PIPE

    kwargs['stdout'] = output_file
    # Interleave stderr with stdout so there is a single output stream.
    kwargs['stderr'] = subprocess.STDOUT
    if kill_children:
        # New session => new process group, so the whole tree is killable.
        kwargs['preexec_fn'] = os.setsid

    process = subprocess.Popen(command, *args, **kwargs)
    # Capture the group id up front for group-kill cleanup later.
    process_group_id = os.getpgid(process.pid)

    wrapped_process = WrappedPopen(process)
    if timeout is not None:
        # Watchdog that kills the process (and optionally its group) after
        # |timeout| seconds; it also records timed_out on wrapped_process.
        kill_thread = _start_kill_thread(wrapped_process, kill_children,
                                         timeout)
    output, _ = process.communicate()

    if timeout is not None:
        kill_thread.cancel()
    elif kill_children:
        # elif because the kill_thread will kill children if needed.
        _kill_process_group(process_group_id)

    retcode = process.returncode

    # Truncate for logging: commands and outputs can be arbitrarily large.
    command_log_str = ' '.join(command)[:LOG_LIMIT_FIELD]
    log_message = 'Executed command: "%s" returned: %d.'

    if output is not None:
        output = output.decode('utf-8', errors='ignore')
        # Log only the tail of the output (presumably the most relevant
        # part for diagnosing failures).
        output_for_log = output[-LOG_LIMIT_FIELD:]
        log_extras = {'output': output_for_log}
    else:
        log_extras = None

    # A process killed by our own timeout is reported through the
    # ProcessResult, not raised as an error.
    if expect_zero and retcode != 0 and not wrapped_process.timed_out:
        logs.error(log_message, command_log_str, retcode, extras=log_extras)
        raise subprocess.CalledProcessError(retcode, command)

    logs.debug(log_message, command_log_str, retcode, extras=log_extras)
    return ProcessResult(retcode, output, wrapped_process.timed_out)
Example #5
0
def execute(  # pylint: disable=too-many-locals,too-many-branches
        command: List[str],
        *args,
        expect_zero: bool = True,
        output_files=None,
        timeout: int = None,
        write_to_stdout: bool = True,
        # Not True by default because we can't always set group on processes.
        kill_children: bool = False,
        **kwargs) -> ProcessResult:
    """Execute |command| and return the returncode and the output.

    Args:
        command: The command to run, as an argv-style list (no shell).
        expect_zero: If True, a nonzero return code raises
            CalledProcessError — unless the process was killed by our own
            timeout handling.
        output_files: File objects the child's output is mirrored to while
            it is also captured. The list is copied, not mutated.
        timeout: Seconds before a watchdog thread kills the process, or
            None for no timeout.
        write_to_stdout: If True, mirror output to sys.stdout as well.
        kill_children: If True, the child runs in its own session so its
            descendants can be killed as a group.

    Returns:
        A ProcessResult with the return code, decoded output, and whether
        the process timed out.
    """
    if output_files is None:
        output_files = []
    else:
        # Copy so appending sys.stdout below doesn't mutate the caller's
        # list (and avoids the mutable-argument-sharing pitfall).
        output_files = output_files[:]
    if write_to_stdout:
        output_files.append(sys.stdout)
    if output_files:
        # Line-buffer so mirrored output shows up promptly.
        kwargs['bufsize'] = 1
        kwargs['close_fds'] = 'posix' in sys.builtin_module_names

    # Always capture; stderr is interleaved into the same stream.
    kwargs['stdout'] = subprocess.PIPE
    kwargs['stderr'] = subprocess.STDOUT
    if kill_children:
        # New session => new process group, so the whole tree is killable.
        kwargs['preexec_fn'] = os.setsid

    process = subprocess.Popen(command, *args, **kwargs)
    # Capture the group id up front for group-kill cleanup later.
    process_group_id = os.getpgid(process.pid)

    kill_thread = None
    wrapped_process = WrappedPopen(process)
    if timeout is not None:
        # Watchdog that kills the process (and optionally its group) after
        # |timeout| seconds; it also records timed_out on wrapped_process.
        kill_thread = _start_kill_thread(wrapped_process, kill_children,
                                         timeout)
    if output_files:
        # Stream the child's output to |output_files| while collecting it.
        output = _mirror_output(process, output_files)
    else:
        output, _ = process.communicate()
        output = output.decode('utf-8', errors='ignore')
    process.wait()
    if kill_thread:
        kill_thread.cancel()
    elif kill_children:
        # elif because the kill_thread kills children itself if needed.
        _kill_process_group(process_group_id)
    retcode = process.returncode

    # Truncate for logging: commands and outputs can be arbitrarily large.
    log_message = ('Executed command: "{command}" returned: {retcode}.'.format(
        command=(' '.join(command))[:LOG_LIMIT_FIELD], retcode=retcode))
    output_for_log = output[-LOG_LIMIT_FIELD:]
    log_extras = {'output': output_for_log}

    # A process killed by our own timeout is reported through the
    # ProcessResult, not raised as an error.
    if expect_zero and retcode != 0 and not wrapped_process.timed_out:
        logs.error(log_message, extras=log_extras)
        raise subprocess.CalledProcessError(retcode, command)

    logs.debug(log_message, extras=log_extras)
    return ProcessResult(retcode, output, wrapped_process.timed_out)
Example #6
0
 def sleep_until_next_sync(self):
     """Sleep until it is time to do the next sync."""
     if self.last_sync_time is None:
         # First cycle: no previous sync to schedule against.
         sleep_time = experiment_utils.SNAPSHOT_PERIOD
     else:
         target_time = self.last_sync_time + experiment_utils.SNAPSHOT_PERIOD
         sleep_time = target_time - time.time()
         if sleep_time < 0:
             # A sync overran SNAPSHOT_PERIOD and threw the schedule off;
             # note it and don't sleep at all this cycle.
             logs.warning('Sleep time on cycle %d is %d', self.cycle,
                          sleep_time)
             sleep_time = 0
     logs.debug('Sleeping for %d seconds.', sleep_time)
     time.sleep(sleep_time)
     # Record the time before the sync itself runs so consecutive syncs
     # start roughly SNAPSHOT_PERIOD apart.
     self.last_sync_time = time.time()
Example #7
0
    def do_sync(self, final_sync=False):
        """Save corpus archives and results to GCS."""
        try:
            # On the final sync always archive; otherwise only when the
            # corpus directory actually changed. Short-circuit matters:
            # is_corpus_dir_same() refreshes the cached contents.
            corpus_changed = final_sync or not self.is_corpus_dir_same()
            if corpus_changed:
                logs.debug('Cycle: %d changed.', self.cycle)
                self.archive_and_save_corpus()
            else:
                logs.debug('Cycle: %d unchanged.', self.cycle)
                filesystem.append(self.unchanged_cycles_path, str(self.cycle))
            self.save_results()
            logs.debug('Finished sync.')
        except Exception:  # pylint: disable=broad-except
            # Best-effort: a failed sync is logged but never kills the loop.
            logs.error('Failed to sync cycle: %d.', self.cycle)