Example #1
    def task(
        self,
        task_id: UUID_EXPANSION,
        *,
        report_container_type: ContainerType = ContainerType.unique_reports,
        crash_name: str = "fake-crash-sample",
    ) -> None:
        """Inject a report into the specified crash reporting task"""

        task = self.onefuzz.tasks.get(task_id)
        crashes = self._get_container(task, ContainerType.crashes)
        reports = self._get_container(task, report_container_type)

        if crashes is None:
            raise Exception("task does not have a crashes container")

        if reports is None:
            raise Exception(
                "task does not have a %s container" % report_container_type.name
            )

        with tempfile.TemporaryDirectory() as tempdir:
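            # write an empty placeholder input file; only its blob name matters for the fake report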
            file_path = os.path.join(tempdir, crash_name)
            with open(file_path, "w") as handle:
                handle.write("")
            self.onefuzz.containers.files.upload_file(crashes, file_path, crash_name)

        report = Report(
            input_blob=BlobRef(
                account=self._get_storage_account(crashes),
                container=crashes,
                name=crash_name,
            ),
            executable=task.config.task.target_exe,
            crash_type="fake crash report",
            crash_site="fake crash site",
            call_stack=["#0 fake", "#1 call", "#2 stack"],
            call_stack_sha256=ZERO_SHA256,
            input_sha256=EMPTY_SHA256,
            asan_log="fake asan log",
            task_id=task_id,
            job_id=task.job_id,
            minimized_stack=[],
            minimized_stack_function_names=[],
            tool_name="libfuzzer",
            tool_version="1.2.3",
            onefuzz_version="1.2.3",
        )

        with tempfile.TemporaryDirectory() as tempdir:
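            # serialize the fabricated report and upload it to the report container as <crash_name>.json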
            file_path = os.path.join(tempdir, "report.json")
            with open(file_path, "w") as handle:
                handle.write(report.json())

            self.onefuzz.containers.files.upload_file(
                reports, file_path, crash_name + ".json"
            )
Example #2
def parse_report(content: Union[str, bytes],
                 metadata: Optional[str] = None) -> Optional[Report]:
    if isinstance(content, bytes):
        try:
            content = content.decode()
        except UnicodeDecodeError as err:
            logging.error(
                "unable to parse report (%s): unicode decode of report failed - %s",
                metadata,
                err,
            )
            return None

    try:
        data = json.loads(content)
    except json.decoder.JSONDecodeError as err:
        logging.error("unable to parse report (%s): json decoding failed - %s",
                      metadata, err)
        return None

    try:
        entry = Report.parse_obj(data)
    except ValidationError as err:
        logging.error("unable to parse report (%s): %s", metadata, err)
        return None

    return entry
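
For context, a minimal usage sketch follows; it assumes `parse_report` and the `Report` model are importable from this module, and the file path is illustrative only.

with open("report.json", "rb") as handle:
    raw = handle.read()

# parse_report returns None on any unicode, JSON, or schema validation failure,
# so callers only need a None check; the failure details are already logged.
report = parse_report(raw, metadata="crashes-container/report.json")
if report is None:
    print("not a valid crash report")
else:
    print(report.crash_type, report.task_id)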
Example #3
    def test_report_no_resize(self) -> None:
        report_path = Path(__file__).parent / "data" / "report.json"
        with open(report_path, "r") as handle:
            content = handle.read()
            data = json.loads(content)
            report = Report.parse_obj(data)
            fixed_report = fix_report_size(content, report)
            self.assertEqual(report, fixed_report)
Example #4
def fix_report_size(
    content: str,
    report: Report,
    acceptable_report_length_kb: int = 24,
    keep_num_entries: int = 10,
    keep_string_len: int = 256,
) -> Report:
    logging.info(f"report content length {getsizeof(content)}")
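    # getsizeof(content) is the in-memory size of the raw JSON string, used as a proxy for report size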
    if getsizeof(content) > acceptable_report_length_kb * 1024:
        msg = f"report data exceeds {acceptable_report_length_kb}K {getsizeof(content)}"
        if len(report.call_stack) > keep_num_entries:
            msg = msg + "; removing some of stack frames from the report"
            report.call_stack = report.call_stack[0:keep_num_entries] + ["..."]

        if report.asan_log and len(report.asan_log) > keep_string_len:
            msg = msg + "; removing some of asan log entries from the report"
            report.asan_log = report.asan_log[0:keep_string_len] + "..."

        if report.minimized_stack and len(
                report.minimized_stack) > keep_num_entries:
            msg = msg + "; removing some of minimized stack frames from the report"
            report.minimized_stack = report.minimized_stack[
                0:keep_num_entries] + ["..."]

        if (report.minimized_stack_function_names and
                len(report.minimized_stack_function_names) > keep_num_entries):
            msg = (
                msg +
                "; removing some of minimized stack function names from the report"
            )
            report.minimized_stack_function_names = (
                report.minimized_stack_function_names[0:keep_num_entries] +
                ["..."])

        if (report.minimized_stack_function_lines and
                len(report.minimized_stack_function_lines) > keep_num_entries):
            msg = (
                msg +
                "; removing some of minimized stack function lines from the report"
            )
            report.minimized_stack_function_lines = (
                report.minimized_stack_function_lines[0:keep_num_entries] +
                ["..."])

        logging.info(msg)
    return report
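
For reference, a minimal sketch of the truncation arithmetic applied above, using a plain list in place of a `Report` field:

keep_num_entries = 10
call_stack = [f"#{i} frame" for i in range(50)]

# keep the first keep_num_entries frames and append an ellipsis marker
truncated = call_stack[0:keep_num_entries] + ["..."]
assert len(truncated) == 11  # 10 kept frames plus the "..." marker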
Example #5
    def test_sample(self) -> None:
        expected_path = Path(__file__).parent / "data" / "ado-rendered.json"
        with open(expected_path, "r") as handle:
            expected_document = json.load(handle)

        report_path = Path(__file__).parent / "data" / "crash-report-with-html.json"
        with open(report_path, "r") as handle:
            report_raw = json.load(handle)

        ado_path = Path(__file__).parent / "data" / "ado-config.json"
        with open(ado_path, "r") as handle:
            ado_raw = json.load(handle)

        report = Report.parse_obj(report_raw)
        config = ADOTemplate.parse_obj(ado_raw)

        container = Container("containername")
        filename = "test.json"

        job = Job(
            config=JobConfig(project="project", name="name", build="build", duration=1)
        )
        task = Task(
            config=TaskConfig(
                job_id=job.job_id,
                tags={},
                containers=[],
                task=TaskDetails(type=TaskType.libfuzzer_fuzz, duration=1),
            ),
            job_id=job.job_id,
            os=OS.linux,
        )

        renderer = Render(
            container,
            filename,
            report,
            task=task,
            job=job,
            target_url="https://contoso.com/1",
            input_url="https://contoso.com/2",
            report_url="https://contoso.com/3",
        )

        ado = ADO(container, filename, config, report, renderer=renderer)
        work_item_type, document = ado.render_new()
        self.assertEqual(work_item_type, "Bug")

        as_obj = [x.as_dict() for x in document]

        self.assertEqual(as_obj, expected_document)
Example #6
def parse_report_or_regression(
    content: Union[str, bytes],
    file_path: Optional[str] = None,
    expect_reports: bool = False,
) -> Optional[Union[Report, RegressionReport]]:
    if isinstance(content, bytes):
        try:
            content = content.decode()
        except UnicodeDecodeError as err:
            if expect_reports:
                logging.error(f"unable to parse report ({file_path}): "
                              f"unicode decode of report failed - {err}")
            return None

    try:
        data = json.loads(content)
    except json.decoder.JSONDecodeError as err:
        if expect_reports:
            logging.error(
                f"unable to parse report ({file_path}): json decoding failed - {err}"
            )
        return None

    regression_err = None
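    # try RegressionReport first; fall back to parsing a plain Report if validation fails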
    try:
        regression_report = RegressionReport.parse_obj(data)

        if (regression_report.crash_test_result is not None and
                regression_report.crash_test_result.crash_report is not None):
            regression_report.crash_test_result.crash_report = fix_report_size(
                content, regression_report.crash_test_result.crash_report)

        if (regression_report.original_crash_test_result is not None
                and regression_report.original_crash_test_result.crash_report
                is not None):
            regression_report.original_crash_test_result.crash_report = fix_report_size(
                content,
                regression_report.original_crash_test_result.crash_report)
        return regression_report
    except ValidationError as err:
        regression_err = err

    try:
        report = Report.parse_obj(data)
        return fix_report_size(content, report)
    except ValidationError as err:
        if expect_reports:
            logging.error(
                f"unable to parse report ({file_path}) as a report or regression. "
                f"regression error: {regression_err} report error: {err}")
        return None
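
A brief usage sketch (assuming the function and both models are importable; the file name is illustrative). Callers distinguish the two result shapes with `isinstance`:

with open("example.json", "rb") as handle:
    raw = handle.read()

result = parse_report_or_regression(raw, file_path="example.json", expect_reports=True)
if isinstance(result, RegressionReport):
    print("regression report")
elif isinstance(result, Report):
    print("crash report")
else:
    print("not parseable as either model")  # errors were logged because expect_reports=True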
Example #7
    def test_report_resize(self) -> None:
        report_path = Path(__file__).parent / "data" / "report-long.json"
        with open(report_path, "r") as handle:
            content = handle.read()
            data = json.loads(content)
            report = Report.parse_obj(data)
            fixed_report = fix_report_size(
                content, report, acceptable_report_length_kb=10, keep_num_entries=10
            )
            # the extra item is the trailing "..." marker
            self.assertEqual(len(fixed_report.call_stack), 11)
            report.call_stack = report.call_stack[0:10] + ["..."]
            self.assertEqual(report, fixed_report)
Example #8
def main() -> None:
    if len(sys.argv) < 2:
        print(f"usage: {__file__} [OUTPUT_FILE]")
        sys.exit(1)
    filename = sys.argv[1]

    task_config = TaskConfig(
        job_id=UUID(int=0),
        task=TaskDetails(
            type=TaskType.libfuzzer_fuzz,
            duration=1,
            target_exe="fuzz.exe",
            target_env={},
            target_options=[],
        ),
        containers=[
            TaskContainers(name=Container("my-setup"),
                           type=ContainerType.setup),
            TaskContainers(name=Container("my-inputs"),
                           type=ContainerType.inputs),
            TaskContainers(name=Container("my-crashes"),
                           type=ContainerType.crashes),
        ],
        tags={},
    )
    report = Report(
        input_blob=BlobRef(
            account="contoso-storage-account",
            container=Container("crashes"),
            name="input.txt",
        ),
        executable="fuzz.exe",
        crash_type="example crash report type",
        crash_site="example crash site",
        call_stack=["#0 line", "#1 line", "#2 line"],
        call_stack_sha256=ZERO_SHA256,
        input_sha256=EMPTY_SHA256,
        asan_log="example asan log",
        task_id=UUID(int=0),
        job_id=UUID(int=0),
        scariness_score=10,
        scariness_description="example-scariness",
        tool_name="libfuzzer",
        tool_version="1.2.3",
        onefuzz_version="1.2.3",
    )
    examples: List[Event] = [
        EventPing(ping_id=UUID(int=0)),
        EventTaskCreated(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            config=task_config,
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
        ),
        EventTaskStopped(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
            config=task_config,
        ),
        EventTaskFailed(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            error=Error(code=ErrorCode.TASK_FAILED,
                        errors=["example error message"]),
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
            config=task_config,
        ),
        EventTaskStateUpdated(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            state=TaskState.init,
            config=task_config,
        ),
        EventProxyCreated(region=Region("eastus"), proxy_id=UUID(int=0)),
        EventProxyDeleted(region=Region("eastus"), proxy_id=UUID(int=0)),
        EventProxyFailed(
            region=Region("eastus"),
            proxy_id=UUID(int=0),
            error=Error(code=ErrorCode.PROXY_FAILED,
                        errors=["example error message"]),
        ),
        EventProxyStateUpdated(
            region=Region("eastus"),
            proxy_id=UUID(int=0),
            state=VmState.init,
        ),
        EventPoolCreated(
            pool_name=PoolName("example"),
            os=OS.linux,
            arch=Architecture.x86_64,
            managed=True,
        ),
        EventPoolDeleted(pool_name=PoolName("example")),
        EventScalesetCreated(
            scaleset_id=UUID(int=0),
            pool_name=PoolName("example"),
            vm_sku="Standard_D2s_v3",
            image="Canonical:UbuntuServer:18.04-LTS:latest",
            region=Region("eastus"),
            size=10,
        ),
        EventScalesetFailed(
            scaleset_id=UUID(int=0),
            pool_name=PoolName("example"),
            error=Error(code=ErrorCode.UNABLE_TO_RESIZE,
                        errors=["example error message"]),
        ),
        EventScalesetDeleted(scaleset_id=UUID(int=0),
                             pool_name=PoolName("example")),
        EventScalesetStateUpdated(
            scaleset_id=UUID(int=0),
            pool_name=PoolName("example"),
            state=ScalesetState.init,
        ),
        EventScalesetResizeScheduled(scaleset_id=UUID(int=0),
                                     pool_name=PoolName("example"),
                                     size=0),
        EventJobCreated(
            job_id=UUID(int=0),
            config=JobConfig(
                project="example project",
                name="example name",
                build="build 1",
                duration=24,
            ),
        ),
        EventJobStopped(
            job_id=UUID(int=0),
            config=JobConfig(
                project="example project",
                name="example name",
                build="build 1",
                duration=24,
            ),
            task_info=[
                JobTaskStopped(
                    task_id=UUID(int=0),
                    task_type=TaskType.libfuzzer_fuzz,
                    error=Error(code=ErrorCode.TASK_FAILED,
                                errors=["example error message"]),
                ),
                JobTaskStopped(
                    task_id=UUID(int=1),
                    task_type=TaskType.coverage,
                ),
            ],
        ),
        EventNodeCreated(machine_id=UUID(int=0),
                         pool_name=PoolName("example")),
        EventNodeDeleted(machine_id=UUID(int=0),
                         pool_name=PoolName("example")),
        EventNodeStateUpdated(
            machine_id=UUID(int=0),
            pool_name=PoolName("example"),
            state=NodeState.setting_up,
        ),
        EventRegressionReported(
            regression_report=RegressionReport(
                crash_test_result=CrashTestResult(crash_report=report),
                original_crash_test_result=CrashTestResult(
                    crash_report=report),
            ),
            container=Container("container-name"),
            filename="example.json",
        ),
        EventCrashReported(
            container=Container("container-name"),
            filename="example.json",
            report=report,
        ),
        EventFileAdded(container=Container("container-name"),
                       filename="example.txt"),
        EventNodeHeartbeat(machine_id=UUID(int=0),
                           pool_name=PoolName("example")),
        EventTaskHeartbeat(task_id=UUID(int=0),
                           job_id=UUID(int=0),
                           config=task_config),
        EventInstanceConfigUpdated(config=InstanceConfig(
            admins=[UUID(int=0)], allowed_aad_tenants=[UUID(int=0)])),
    ]

    # works around `mypy` not handling that Union has `__args__`
    for event in getattr(Event, "__args__", []):
        seen = False
        for value in examples:
            if isinstance(value, event):
                seen = True
                break
        assert seen, "missing event type definition: %s" % event.__name__

    event_types = [get_event_type(x) for x in examples]

    for event_type in EventType:
        assert event_type in event_types, (
            "missing event type definition: %s" % event_type.name)

    message = WebhookMessage(
        webhook_id=UUID(int=0),
        event_id=UUID(int=0),
        event_type=EventType.ping,
        event=EventPing(ping_id=UUID(int=0)),
        instance_id=UUID(int=0),
        instance_name="example",
    )

    message_event_grid = WebhookMessageEventGrid(
        dataVersion="1.0.0",
        subject="example",
        eventType=EventType.ping,
        eventTime=datetime.datetime.min,
        id=UUID(int=0),
        data=EventPing(ping_id=UUID(int=0)),
    )

    message_event_grid_json = json.dumps(
        [
            json.loads(
                message_event_grid.json(
                    indent=4, exclude_none=True, sort_keys=True))
        ],
        indent=4,
        sort_keys=True,
    )

    result = ""
    result += layer(
        1,
        "Webhook Events",
        "This document describes the basic webhook event subscriptions "
        "available in OneFuzz",
    )
    result += layer(
        2,
        "Payload",
        "Each event will be submitted via HTTP POST to the user provided URL.",
    )

    result += typed(
        3,
        "Example",
        message.json(indent=4, exclude_none=True, sort_keys=True),
        "json",
    )

    result += layer(
        2,
        "Event Grid Payload format",
        "If webhook is set to have Event Grid message format then "
        "the payload will look as follows:",
    )

    result += typed(
        3,
        "Example",
        message_event_grid_json,
        "json",
    )

    result += layer(2, "Event Types (EventType)")

    event_map = {get_event_type(x).name: x for x in examples}

    for name in sorted(event_map.keys()):
        result += f"* [{name}](#{name})\n"

    result += "\n"

    for name in sorted(event_map.keys()):
        example = event_map[name]
        result += layer(3, name)
        result += typed(
            4,
            "Example",
            example.json(indent=4, exclude_none=True, sort_keys=True),
            "json",
        )
        result += typed(4, "Schema",
                        example.schema_json(indent=4, sort_keys=True), "json")

    result += typed(2, "Full Event Schema",
                    message.schema_json(indent=4, sort_keys=True), "json")

    with open(filename, "w", newline="\n", encoding="utf8") as handle:
        handle.write(result)
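
The `layer` and `typed` helpers are not defined in this example. Judging from how their return values are concatenated into `result`, a plausible sketch of Markdown-emitting helpers, offered as an assumption rather than the actual implementation, is:

from typing import Optional


def layer(depth: int, title: str, content: Optional[str] = None) -> str:
    # a Markdown heading of the given depth, optionally followed by a paragraph
    result = f"{'#' * depth} {title}\n\n"
    if content:
        result += f"{content}\n\n"
    return result


def typed(depth: int, title: str, content: str, data_type: str) -> str:
    # a heading followed by a fenced code block in the given language
    return f"{'#' * depth} {title}\n\n```{data_type}\n{content}\n```\n\n"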
Example #9
def main() -> None:
    task_config = TaskConfig(
        job_id=UUID(int=0),
        task=TaskDetails(
            type=TaskType.libfuzzer_fuzz,
            duration=1,
            target_exe="fuzz.exe",
            target_env={},
            target_options=[],
        ),
        containers=[
            TaskContainers(name=Container("my-setup"), type=ContainerType.setup),
            TaskContainers(name=Container("my-inputs"), type=ContainerType.inputs),
            TaskContainers(name=Container("my-crashes"), type=ContainerType.crashes),
        ],
        tags={},
    )
    examples: List[Event] = [
        EventPing(ping_id=UUID(int=0)),
        EventTaskCreated(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            config=task_config,
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
        ),
        EventTaskStopped(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
            config=task_config,
        ),
        EventTaskFailed(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            error=Error(code=ErrorCode.TASK_FAILED, errors=["example error message"]),
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
            config=task_config,
        ),
        EventTaskStateUpdated(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            state=TaskState.init,
            config=task_config,
        ),
        EventProxyCreated(region=Region("eastus")),
        EventProxyDeleted(region=Region("eastus")),
        EventProxyFailed(
            region=Region("eastus"),
            error=Error(code=ErrorCode.PROXY_FAILED, errors=["example error message"]),
        ),
        EventPoolCreated(
            pool_name=PoolName("example"),
            os=OS.linux,
            arch=Architecture.x86_64,
            managed=True,
        ),
        EventPoolDeleted(pool_name=PoolName("example")),
        EventScalesetCreated(
            scaleset_id=UUID(int=0),
            pool_name=PoolName("example"),
            vm_sku="Standard_D2s_v3",
            image="Canonical:UbuntuServer:18.04-LTS:latest",
            region=Region("eastus"),
            size=10,
        ),
        EventScalesetFailed(
            scaleset_id=UUID(int=0),
            pool_name=PoolName("example"),
            error=Error(
                code=ErrorCode.UNABLE_TO_RESIZE, errors=["example error message"]
            ),
        ),
        EventScalesetDeleted(scaleset_id=UUID(int=0), pool_name=PoolName("example")),
        EventJobCreated(
            job_id=UUID(int=0),
            config=JobConfig(
                project="example project",
                name="example name",
                build="build 1",
                duration=24,
            ),
        ),
        EventJobStopped(
            job_id=UUID(int=0),
            config=JobConfig(
                project="example project",
                name="example name",
                build="build 1",
                duration=24,
            ),
            task_info=[
                JobTaskStopped(
                    task_id=UUID(int=0),
                    task_type=TaskType.libfuzzer_fuzz,
                    error=Error(
                        code=ErrorCode.TASK_FAILED, errors=["example error message"]
                    ),
                ),
                JobTaskStopped(
                    task_id=UUID(int=1),
                    task_type=TaskType.libfuzzer_coverage,
                ),
            ],
        ),
        EventNodeCreated(machine_id=UUID(int=0), pool_name=PoolName("example")),
        EventNodeDeleted(machine_id=UUID(int=0), pool_name=PoolName("example")),
        EventNodeStateUpdated(
            machine_id=UUID(int=0),
            pool_name=PoolName("example"),
            state=NodeState.setting_up,
        ),
        EventCrashReported(
            container=Container("container-name"),
            filename="example.json",
            report=Report(
                input_blob=BlobRef(
                    account="contoso-storage-account",
                    container=Container("crashes"),
                    name="input.txt",
                ),
                executable="fuzz.exe",
                crash_type="example crash report type",
                crash_site="example crash site",
                call_stack=["#0 line", "#1 line", "#2 line"],
                call_stack_sha256=ZERO_SHA256,
                input_sha256=EMPTY_SHA256,
                asan_log="example asan log",
                task_id=UUID(int=0),
                job_id=UUID(int=0),
                scariness_score=10,
                scariness_description="example-scariness",
            ),
        ),
        EventFileAdded(container=Container("container-name"), filename="example.txt"),
        EventNodeHeartbeat(machine_id=UUID(int=0), pool_name=PoolName("example")),
        EventTaskHeartbeat(task_id=UUID(int=0), job_id=UUID(int=0), config=task_config),
    ]

    # works around `mypy` not handling that Union has `__args__`
    for event in getattr(Event, "__args__", []):
        seen = False
        for value in examples:
            if isinstance(value, event):
                seen = True
                break
        assert seen, "missing event type definition: %s" % event.__name__

    event_types = [get_event_type(x) for x in examples]

    for event_type in EventType:
        assert event_type in event_types, (
            "missing event type definition: %s" % event_type.name
        )

    message = WebhookMessage(
        webhook_id=UUID(int=0),
        event_id=UUID(int=0),
        event_type=EventType.ping,
        event=EventPing(ping_id=UUID(int=0)),
        instance_id=UUID(int=0),
        instance_name="example",
    )

    layer(
        1,
        "Webhook Events",
        "This document describes the basic webhook event subscriptions "
        "available in OneFuzz",
    )
    layer(
        2,
        "Payload",
        "Each event will be submitted via HTTP POST to the user provided URL.",
    )

    typed(
        3, "Example", message.json(indent=4, exclude_none=True, sort_keys=True), "json"
    )
    layer(2, "Event Types (EventType)")

    event_map = {get_event_type(x).name: x for x in examples}

    for name in sorted(event_map.keys()):
        print(f"* [{name}](#{name})")

    print()

    for name in sorted(event_map.keys()):
        example = event_map[name]
        layer(3, name)
        typed(
            4,
            "Example",
            example.json(indent=4, exclude_none=True, sort_keys=True),
            "json",
        )
        typed(4, "Schema", example.schema_json(indent=4, sort_keys=True), "json")

    typed(2, "Full Event Schema", message.schema_json(indent=4, sort_keys=True), "json")