Example #1
  def test_pull_from_subscription(self):
    """Test pull_from_subscription."""
    self.client.publish(
        self.topic, [
            pubsub.Message(data='123'),
            pubsub.Message(data='123', attributes={'a': '1337'}),
            pubsub.Message(data='456'),
            pubsub.Message(data='456'),
        ])

    messages = self.client.pull_from_subscription(
        self.subscription, max_messages=1)
    self.assertEqual(1, len(messages))
    self.assertEqual('123', messages[0].data)
    self.assertIsNone(messages[0].attributes)

    messages = self.client.pull_from_subscription(
        self.subscription, max_messages=1)
    self.assertEqual(1, len(messages))
    self.assertEqual('123', messages[0].data)
    self.assertDictEqual({'a': '1337'}, messages[0].attributes)

    messages = self.client.pull_from_subscription(
        self.subscription, acknowledge=True)
    self.assertEqual(2, len(messages))

    for message in messages:
      self.assertEqual('456', message.data)

    # Test messages which were not acked in time. They will be re-sent and can
    # be re-pulled.
    time.sleep(ACK_DEADLINE + ACK_DEADLINE_WINDOW)
    messages = self.client.pull_from_subscription(self.subscription)
    self.assertEqual(2, len(messages))
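    # _message_to_dict base64-encodes the message data, so '123' becomes 'MTIz'.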
    self.assertItemsEqual([
        {
            'data': 'MTIz',
        },
        {
            'data': 'MTIz',
            'attributes': {
                'a': '1337',
            }
        },
    ], [pubsub._message_to_dict(message) for message in messages])  # pylint: disable=protected-access
Example #2
def _make_bisection_request(pubsub_topic, testcase, target, bisect_type):
    """Make a bisection request to the external bisection service. Returns whether
  or not a request was actually made."""
    if bisect_type == 'fixed':
        old_commit, new_commit = _get_commits(testcase.fixed,
                                              testcase.job_type)
    elif bisect_type == 'regressed':
        old_commit, new_commit = _get_commits(testcase.regression,
                                              testcase.job_type)
    else:
        raise ValueError('Invalid bisection type: ' + bisect_type)

    if not new_commit:
        # old_commit can be empty (i.e. '0' case), but new_commit should never be.
        return False

    old_commit, new_commit = _check_commits(testcase, bisect_type, old_commit,
                                            new_commit)

    reproducer = blobs.read_key(testcase.minimized_keys
                                or testcase.fuzzed_keys)
    pubsub_client = pubsub.PubSubClient()
    pubsub_client.publish(pubsub_topic, [
        pubsub.Message(
            reproducer, {
                'type': bisect_type,
                'project_name': target.project,
                'sanitizer': environment.SANITIZER_NAME_MAP[
                    environment.get_memory_tool_name(testcase.job_type)],
                'fuzz_target': target.binary,
                'old_commit': old_commit,
                'new_commit': new_commit,
                'testcase_id': str(testcase.key.id()),
                'issue_id': testcase.bug_information,
                'crash_type': testcase.crash_type,
                'crash_state': testcase.crash_state,
                'security': str(testcase.security_flag),
                'severity': severity_analyzer.severity_to_string(
                    testcase.security_severity),
                'timestamp': testcase.timestamp.isoformat(),
            })
    ])
    return True
Example #3
def _notify_bisection_invalid(pubsub_topic, testcase):
  """Notify the bisection infrastructure of a testcase getting into invalid
  state."""
  pubsub_client = pubsub.PubSubClient()
  pubsub_client.publish(pubsub_topic, [
      pubsub.Message(b'', {
          'type': 'invalid',
          'testcase_id': str(testcase.key.id()),
      })
  ])
Example #4
    def to_pubsub_message(self):
        """Convert the task to a pubsub message."""
        attributes = {
            'command': self.command,
            'argument': str(self.argument),
            'job': self.job,
        }

        if self.eta:
            attributes['eta'] = str(utils.utc_datetime_to_timestamp(self.eta))

        return pubsub.Message(attributes=attributes)
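
A minimal sketch of how the resulting message might be published, reusing the PubSubClient.publish() call shown in the other examples; the task object and the topic path are assumed placeholders, not part of the original code:

# Sketch only: `task` and the topic path below are assumed placeholders.
pubsub_client = pubsub.PubSubClient()
message = task.to_pubsub_message()
pubsub_client.publish('projects/my-project/topics/jobs-linux', [message])
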
Example #5
    def test_modify_ack_deadline(self):
        """Test modify ACK deadline."""
        self.client.publish(self.topic, [
            pubsub.Message(data='123'),
        ])
        messages = self.client.pull_from_subscription(self.subscription,
                                                      max_messages=1)
        self.assertEqual(1, len(messages))

        # Make message instantly available again.
        messages[0].modify_ack_deadline(0)
        messages = self.client.pull_from_subscription(self.subscription,
                                                      max_messages=1)
        self.assertEqual(1, len(messages))
Example #6
def request_bisection(testcase, bisect_type):
    """Request precise bisection."""
    pubsub_topic = local_config.ProjectConfig().get(
        'bisect_service.pubsub_topic')
    if not pubsub_topic:
        return

    target = testcase.get_fuzz_target()
    if not target:
        return

    if bisect_type == 'fixed':
        old_commit, new_commit = _get_commits(testcase.fixed,
                                              testcase.job_type)
    elif bisect_type == 'regressed':
        old_commit, new_commit = _get_commits(testcase.regression,
                                              testcase.job_type)
    else:
        raise ValueError('Invalid bisection type: ' + bisect_type)

    reproducer = blobs.read_key(testcase.minimized_keys
                                or testcase.fuzzed_keys)
    pubsub_client = pubsub.PubSubClient()
    pubsub_client.publish(pubsub_topic, [
        pubsub.Message(
            reproducer, {
                'type': bisect_type,
                'project_name': target.project,
                'sanitizer': environment.SANITIZER_NAME_MAP[
                    environment.get_memory_tool_name(testcase.job_type)],
                'fuzz_target': target.binary,
                'old_commit': old_commit,
                'new_commit': new_commit,
                'testcase_id': str(testcase.key.id()),
                'issue_id': testcase.bug_information,
                'crash_type': testcase.crash_type,
                'security': str(testcase.security_flag),
            })
    ])
Example #7
    def test_ack(self):
        """Test a single message ack."""
        self.client.publish(self.topic, [
            pubsub.Message(data='123'),
        ])

        messages = self.client.pull_from_subscription(self.subscription,
                                                      max_messages=1)
        self.assertEqual(1, len(messages))

        # Acknowledging the message means it shouldn't get pulled again.
        messages[0].ack()
        time.sleep(ACK_DEADLINE + ACK_DEADLINE_WINDOW)

        messages = self.client.pull_from_subscription(self.subscription,
                                                      max_messages=1)
        self.assertEqual(0, len(messages))
Example #8
def notify_issue_update(testcase, status):
    """Notify that an issue update occurred (i.e. issue was filed or closed)."""
    topic = local_config.ProjectConfig().get('issue_updates.pubsub_topic')
    if not topic:
        return

    pubsub_client = pubsub.PubSubClient()
    pubsub_client.publish(topic, [
        pubsub.Message(
            attributes={
                'crash_address': testcase.crash_address,
                'crash_state': testcase.crash_state,
                'crash_type': testcase.crash_type,
                'issue_id': testcase.bug_information or '',
                'security': str(testcase.security_flag).lower(),
                'status': status,
                'testcase_id': str(testcase.key.id()),
            })
    ])
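
For reference, a consumer of these notifications could pull them back with the same client API exercised in the test examples above; a minimal consumer sketch, assuming the test client is a pubsub.PubSubClient and using an assumed subscription path:

# Consumer sketch only: the subscription path is an assumed placeholder.
client = pubsub.PubSubClient()
messages = client.pull_from_subscription(
    'projects/my-project/subscriptions/issue-updates', max_messages=10)
for message in messages:
    # The notification payload is carried entirely in the attributes.
    print(message.attributes['status'], message.attributes['testcase_id'])
    message.ack()  # Acknowledge so the message is not redelivered.
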
Example #9
def _make_bisection_request(pubsub_topic, testcase, target, bisect_type):
    """Make a bisection request to the external bisection service."""
    if bisect_type == 'fixed':
        old_commit, new_commit = _get_commits(testcase.fixed,
                                              testcase.job_type)
    elif bisect_type == 'regressed':
        old_commit, new_commit = _get_commits(testcase.regression,
                                              testcase.job_type)
    else:
        raise ValueError('Invalid bisection type: ' + bisect_type)

    if not old_commit or not new_commit:
        return

    reproducer = blobs.read_key(testcase.minimized_keys
                                or testcase.fuzzed_keys)
    pubsub_client = pubsub.PubSubClient()
    pubsub_client.publish(pubsub_topic, [
        pubsub.Message(
            reproducer, {
                'type': bisect_type,
                'project_name': target.project,
                'sanitizer': environment.SANITIZER_NAME_MAP[
                    environment.get_memory_tool_name(testcase.job_type)],
                'fuzz_target': target.binary,
                'old_commit': old_commit,
                'new_commit': new_commit,
                'testcase_id': str(testcase.key.id()),
                'issue_id': testcase.bug_information,
                'crash_type': testcase.crash_type,
                'security': str(testcase.security_flag),
            })
    ])
Example #10
def add_external_task(command, testcase_id, job):
    """Add external task."""
    if command != 'progression':
        # Only progression is supported.
        return

    pubsub_client = pubsub.PubSubClient()
    topic_name = job.external_reproduction_topic
    assert topic_name is not None

    testcase = data_handler.get_testcase_by_id(testcase_id)
    fuzz_target = testcase.get_fuzz_target()

    memory_tool_name = environment.get_memory_tool_name(job.name)
    sanitizer = environment.SANITIZER_NAME_MAP.get(memory_tool_name)
    job_environment = job.get_environment()
    if job_environment.get('CUSTOM_BINARY'):
        raise RuntimeError('External jobs should never have custom binaries.')

    build_path = (job_environment.get('RELEASE_BUILD_BUCKET_PATH')
                  or job_environment.get('FUZZ_TARGET_BUILD_BUCKET_PATH'))
    if build_path is None:
        raise RuntimeError(f'{job.name} has no build path defined.')

    min_revision = (testcase.get_metadata('last_tested_revision')
                    or testcase.crash_revision)

    logs.log(f'Publishing external reproduction task for {testcase_id}.')
    attributes = {
        'project': job.project,
        'target': fuzz_target.binary,
        'fuzzer': testcase.fuzzer_name,
        'sanitizer': sanitizer,
        'job': job.name,
        'testcaseId': str(testcase_id),
        'buildPath': build_path,
        'minRevisionAbove': str(min_revision),
    }

    reproducer = blobs.read_key(testcase.minimized_keys
                                or testcase.fuzzed_keys)
    message = pubsub.Message(data=reproducer, attributes=attributes)
    pubsub_client.publish(topic_name, [message])
Example #11
def _prepare_predator_message(testcase):
    """Prepare the json sent to the Predator service for the given test case."""
    result, error_message = _is_predator_testcase(testcase)
    if not result:
        _set_predator_result_with_error(testcase, error_message)
        return None

    crash_revisions_dict, crash_revision_hash = _prepare_component_revisions_dict(
        testcase.crash_revision, testcase.job_type)
    # Do a None check since we can return {} for revision = 0.
    if crash_revisions_dict is None:
        _set_predator_result_with_error(
            testcase, 'Failed to fetch component revisions for revision %s.' %
            testcase.crash_revision)
        return None

    dependency_rolls = []
    start_revision_hash = end_revision_hash = None
    if ':' in testcase.regression:
        regression_parts = testcase.regression.split(':', 1)
        start_revision = int(regression_parts[0])
        end_revision = int(regression_parts[1])

        start_revisions_dict, start_revision_hash = (
            _prepare_component_revisions_dict(start_revision,
                                              testcase.job_type))
        # Do a None check since we can return {} for revision = 0.
        if start_revisions_dict is None:
            _set_predator_result_with_error(
                testcase,
                'Failed to fetch component revisions for revision %s.' %
                start_revision)
            return None

        end_revisions_dict, end_revision_hash = (
            _prepare_component_revisions_dict(end_revision, testcase.job_type))
        # Do a None check since we can return {} for revision = 0.
        if end_revisions_dict is None:
            _set_predator_result_with_error(
                testcase,
                'Failed to fetch component revisions for revision %s.' %
                end_revision)
            return None

        if start_revision != 0:
            dependency_rolls = _compute_rolls(start_revisions_dict,
                                              end_revisions_dict)

    # Put the current revisions dictionary in the format predator expects.
    crash_revision_component_revisions_list = (
        _format_component_revisions_for_predator(crash_revisions_dict))

    # In addition to the start and end revisions, Predator expects the regression
    # range to include the dependency path and repository URL in the same way that
    # they would be included in the dependency rolls. Note that we do not take
    # this from the rolls dict directly as it may not be available.
    src_entry = [
        entry for entry in crash_revision_component_revisions_list
        if entry['dep_path'] == 'src'
    ][0]

    # TODO(mbarbella): This is a hack since ClusterFuzz relies on "src" as a
    # special-cased path, but this is only going to be the correct repository
    # root path some of the time. For certain cases, we must update it.
    repo_url = src_entry['repo_url']
    real_dep_path = SRC_COMPONENT_OVERRIDES.get(repo_url, 'src')
    if real_dep_path != 'src':
        for dependency_list in [
                dependency_rolls, crash_revision_component_revisions_list
        ]:
            for entry in dependency_list:
                if entry['dep_path'] == 'src':
                    entry['dep_path'] = real_dep_path
                    break

    regression_range = {
        'dep_path': real_dep_path,
        'repo_url': repo_url,
        'old_revision': start_revision_hash,
        'new_revision': end_revision_hash,
    }

    crash_stacktrace = _filter_stacktrace(
        data_handler.get_stacktrace(testcase))

    return pubsub.Message(data=json.dumps({
        'stack_trace': crash_stacktrace,
        'crash_revision': crash_revision_hash,
        'customized_data': {
            'regression_range': regression_range,
            'dependency_rolls': dependency_rolls,
            'dependencies': crash_revision_component_revisions_list,
            'crash_type': testcase.crash_type,
            'crash_address': testcase.crash_address,
            'sanitizer': environment.get_memory_tool_name(testcase.job_type),
            'security_flag': testcase.security_flag,
            'job_type': testcase.job_type,
            'testcase_id': testcase.key.id()
        },
        'platform': testcase.platform,
        'client_id': 'clusterfuzz',
        'signature': testcase.crash_state,
    }).encode('utf-8'))
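
Example #11 only builds the Predator request message; a minimal sketch of publishing it, following the publish pattern from the earlier examples, with an assumed topic path:

# Sketch only: the topic path below is an assumed placeholder.
message = _prepare_predator_message(testcase)
if message is not None:  # None means the testcase was rejected with an error result.
    pubsub_client = pubsub.PubSubClient()
    pubsub_client.publish(
        'projects/my-project/topics/predator-requests', [message])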