def _check_regression(self, job: Job) -> bool:
    """Fetch the job's single regression report and interpret it.

    Returns True when the report carries a crash report, False when it
    carries a no-repro entry.  Raises if the job does not have exactly
    one regression-report container holding exactly one file, or if the
    parsed report has neither entry.
    """
    results = self.onefuzz.jobs.containers.list(
        job.job_id, ContainerType.regression_reports)

    # there must be exactly one regression-report container for the job
    if len(results) != 1:
        raise Exception(f"unexpected regression containers: {results}")
    container = next(iter(results))

    # ...holding exactly one report file
    if len(results[container]) != 1:
        raise Exception(
            f"unexpected regression container output: {results}")
    file = results[container][0]

    # download and deserialize the regression report
    raw = self.onefuzz.containers.files.get(Container(container), file)
    report = RegressionReport.parse_obj(json.loads(raw.decode()))

    if report.crash_test_result.crash_report is not None:
        self.logger.info("regression report has crash report")
        return True
    if report.crash_test_result.no_repro is not None:
        self.logger.info("regression report has no-repro")
        return False
    raise Exception(f"unexpected report: {report}")
def _check_regression(self, container: Container, file: File) -> bool:
    """Download one regression report file and classify it.

    Returns True for a crash report, False for a no-repro entry;
    raises when the report contains neither.
    """
    blob = self.onefuzz.containers.files.get(Container(container), file)
    report = RegressionReport.parse_obj(json.loads(blob.decode()))

    outcome = report.crash_test_result
    if outcome.crash_report is not None:
        return True
    if outcome.no_repro is not None:
        return False
    raise Exception("invalid crash report")
def parse_report_or_regression(
    content: Union[str, bytes],
    file_path: Optional[str] = None,
    expect_reports: bool = False,
) -> Optional[Union[Report, RegressionReport]]:
    """Parse raw report content as a RegressionReport or, failing that, a Report.

    :param content: raw report body; bytes are decoded as UTF-8 first.
    :param file_path: origin of the content, used only in error messages.
    :param expect_reports: when True, decode/parse failures are logged as
        errors; when False they are silently treated as "not a report".
    :returns: the parsed model, or None when the content is not valid
        UTF-8, not valid JSON, or matches neither schema.
    """
    if isinstance(content, bytes):
        try:
            content = content.decode()
        except UnicodeDecodeError as err:
            if expect_reports:
                logging.error(f"unable to parse report ({file_path}): "
                              f"unicode decode of report failed - {err}")
            return None

    try:
        data = json.loads(content)
    except json.decoder.JSONDecodeError as err:
        if expect_reports:
            logging.error(
                f"unable to parse report ({file_path}): json decoding failed - {err}"
            )
        return None

    # Try the RegressionReport schema first; keep its validation error so it
    # can be included in the combined error message if both parses fail.
    regression_err = None
    try:
        regression_report = RegressionReport.parse_obj(data)

        # trim oversized crash reports embedded in the regression report
        if (regression_report.crash_test_result is not None
                and regression_report.crash_test_result.crash_report is not None):
            regression_report.crash_test_result.crash_report = fix_report_size(
                content, regression_report.crash_test_result.crash_report)

        if (regression_report.original_crash_test_result is not None
                and regression_report.original_crash_test_result.crash_report
                is not None):
            regression_report.original_crash_test_result.crash_report = fix_report_size(
                content,
                regression_report.original_crash_test_result.crash_report)
        return regression_report
    except ValidationError as err:
        regression_err = err

    # Fall back to the plain Report schema.
    try:
        report = Report.parse_obj(data)
        return fix_report_size(content, report)
    except ValidationError as err:
        if expect_reports:
            logging.error(
                f"unable to parse report ({file_path}) as a report or regression. "
                f"regression error: {regression_err} report error: {err}")
        return None
def get_regression_report_task(report: RegressionReport) -> Optional[Task]:
    """Look up the Task that produced a regression report.

    crash_test_result is required on a RegressionReport, but its
    crash_report and no_repro members are both optional; whichever one is
    present carries the originating job_id/task_id pair.

    :returns: the Task, or None (after logging an error) when neither
        entry is present.
    """
    # crash_test_result is required, but report & no_repro are not
    if report.crash_test_result.crash_report:
        return Task.get(
            report.crash_test_result.crash_report.job_id,
            report.crash_test_result.crash_report.task_id,
        )
    if report.crash_test_result.no_repro:
        return Task.get(
            report.crash_test_result.no_repro.job_id,
            report.crash_test_result.no_repro.task_id,
        )

    # BUGFIX: pydantic's BaseModel.json() takes `exclude_none`, not
    # `include_none`; the unknown kwarg was forwarded to json.dumps and
    # raised TypeError whenever this error path was reached.
    logging.error(
        "unable to find crash_report or no_repro entry for report: %s",
        report.json(exclude_none=True),
    )
    return None
def main() -> None:
    """Generate the webhook-events markdown documentation file.

    Builds one example instance of every Event type, verifies the example
    set covers every member of the Event union and every EventType, then
    renders examples plus JSON schemas to the output path given as the
    first command-line argument.
    """
    if len(sys.argv) < 2:
        print(f"usage: {__file__} [OUTPUT_FILE]")
        sys.exit(1)
    filename = sys.argv[1]

    # shared fixtures reused by several of the example events below;
    # UUID(int=0) keeps the rendered examples deterministic
    task_config = TaskConfig(
        job_id=UUID(int=0),
        task=TaskDetails(
            type=TaskType.libfuzzer_fuzz,
            duration=1,
            target_exe="fuzz.exe",
            target_env={},
            target_options=[],
        ),
        containers=[
            TaskContainers(name=Container("my-setup"), type=ContainerType.setup),
            TaskContainers(name=Container("my-inputs"), type=ContainerType.inputs),
            TaskContainers(name=Container("my-crashes"), type=ContainerType.crashes),
        ],
        tags={},
    )
    report = Report(
        input_blob=BlobRef(
            account="contoso-storage-account",
            container=Container("crashes"),
            name="input.txt",
        ),
        executable="fuzz.exe",
        crash_type="example crash report type",
        crash_site="example crash site",
        call_stack=["#0 line", "#1 line", "#2 line"],
        call_stack_sha256=ZERO_SHA256,
        input_sha256=EMPTY_SHA256,
        asan_log="example asan log",
        task_id=UUID(int=0),
        job_id=UUID(int=0),
        scariness_score=10,
        scariness_description="example-scariness",
        tool_name="libfuzzer",
        tool_version="1.2.3",
        onefuzz_version="1.2.3",
    )

    # one example per event type; the assertions below fail the build if a
    # newly-added event type is missing from this list
    examples: List[Event] = [
        EventPing(ping_id=UUID(int=0)),
        EventTaskCreated(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            config=task_config,
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
        ),
        EventTaskStopped(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
            config=task_config,
        ),
        EventTaskFailed(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            error=Error(code=ErrorCode.TASK_FAILED, errors=["example error message"]),
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
            config=task_config,
        ),
        EventTaskStateUpdated(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            state=TaskState.init,
            config=task_config,
        ),
        EventProxyCreated(region=Region("eastus"), proxy_id=UUID(int=0)),
        EventProxyDeleted(region=Region("eastus"), proxy_id=UUID(int=0)),
        EventProxyFailed(
            region=Region("eastus"),
            proxy_id=UUID(int=0),
            error=Error(code=ErrorCode.PROXY_FAILED, errors=["example error message"]),
        ),
        EventProxyStateUpdated(
            region=Region("eastus"),
            proxy_id=UUID(int=0),
            state=VmState.init,
        ),
        EventPoolCreated(
            pool_name=PoolName("example"),
            os=OS.linux,
            arch=Architecture.x86_64,
            managed=True,
        ),
        EventPoolDeleted(pool_name=PoolName("example")),
        EventScalesetCreated(
            scaleset_id=UUID(int=0),
            pool_name=PoolName("example"),
            vm_sku="Standard_D2s_v3",
            image="Canonical:UbuntuServer:18.04-LTS:latest",
            region=Region("eastus"),
            size=10,
        ),
        EventScalesetFailed(
            scaleset_id=UUID(int=0),
            pool_name=PoolName("example"),
            error=Error(code=ErrorCode.UNABLE_TO_RESIZE, errors=["example error message"]),
        ),
        EventScalesetDeleted(scaleset_id=UUID(int=0), pool_name=PoolName("example")),
        EventScalesetStateUpdated(
            scaleset_id=UUID(int=0),
            pool_name=PoolName("example"),
            state=ScalesetState.init,
        ),
        EventScalesetResizeScheduled(scaleset_id=UUID(int=0), pool_name=PoolName("example"), size=0),
        EventJobCreated(
            job_id=UUID(int=0),
            config=JobConfig(
                project="example project",
                name="example name",
                build="build 1",
                duration=24,
            ),
        ),
        EventJobStopped(
            job_id=UUID(int=0),
            config=JobConfig(
                project="example project",
                name="example name",
                build="build 1",
                duration=24,
            ),
            task_info=[
                JobTaskStopped(
                    task_id=UUID(int=0),
                    task_type=TaskType.libfuzzer_fuzz,
                    error=Error(code=ErrorCode.TASK_FAILED, errors=["example error message"]),
                ),
                JobTaskStopped(
                    task_id=UUID(int=1),
                    task_type=TaskType.coverage,
                ),
            ],
        ),
        EventNodeCreated(machine_id=UUID(int=0), pool_name=PoolName("example")),
        EventNodeDeleted(machine_id=UUID(int=0), pool_name=PoolName("example")),
        EventNodeStateUpdated(
            machine_id=UUID(int=0),
            pool_name=PoolName("example"),
            state=NodeState.setting_up,
        ),
        EventRegressionReported(
            regression_report=RegressionReport(
                crash_test_result=CrashTestResult(crash_report=report),
                original_crash_test_result=CrashTestResult(
                    crash_report=report),
            ),
            container=Container("container-name"),
            filename="example.json",
        ),
        EventCrashReported(
            container=Container("container-name"),
            filename="example.json",
            report=report,
        ),
        EventFileAdded(container=Container("container-name"), filename="example.txt"),
        EventNodeHeartbeat(machine_id=UUID(int=0), pool_name=PoolName("example")),
        EventTaskHeartbeat(task_id=UUID(int=0), job_id=UUID(int=0), config=task_config),
        EventInstanceConfigUpdated(config=InstanceConfig(
            admins=[UUID(int=0)], allowed_aad_tenants=[UUID(int=0)])),
    ]

    # verify every member of the Event union has an example above
    # works around `mypy` not handling that Union has `__args__`
    for event in getattr(Event, "__args__", []):
        seen = False
        for value in examples:
            if isinstance(value, event):
                seen = True
                break
        assert seen, "missing event type definition: %s" % event.__name__

    # verify every EventType enum member is represented by an example
    event_types = [get_event_type(x) for x in examples]
    for event_type in EventType:
        assert event_type in event_types, (
            "missing event type definition: %s" % event_type.name)

    # example payloads for the two supported webhook message envelopes
    message = WebhookMessage(
        webhook_id=UUID(int=0),
        event_id=UUID(int=0),
        event_type=EventType.ping,
        event=EventPing(ping_id=UUID(int=0)),
        instance_id=UUID(int=0),
        instance_name="example",
    )
    message_event_grid = WebhookMessageEventGrid(
        dataVersion="1.0.0",
        subject="example",
        eventType=EventType.ping,
        eventTime=datetime.datetime.min,
        id=UUID(int=0),
        data=EventPing(ping_id=UUID(int=0)),
    )
    # Event Grid delivers messages as a JSON array, so wrap the single
    # example in a list before re-serializing
    message_event_grid_json = json.dumps(
        [
            json.loads(
                message_event_grid.json(
                    indent=4, exclude_none=True, sort_keys=True))
        ],
        indent=4,
        sort_keys=True,
    )

    # assemble the markdown document; `layer` emits a heading + prose,
    # `typed` emits a heading + fenced code block
    result = ""
    result += layer(
        1,
        "Webhook Events",
        "This document describes the basic webhook event subscriptions "
        "available in OneFuzz",
    )
    result += layer(
        2,
        "Payload",
        "Each event will be submitted via HTTP POST to the user provided URL.",
    )
    result += typed(
        3,
        "Example",
        message.json(indent=4, exclude_none=True, sort_keys=True),
        "json",
    )
    result += layer(
        2,
        "Event Grid Payload format",
        "If webhook is set to have Event Grid message format then "
        "the payload will look as follows:",
    )
    result += typed(
        3,
        "Example",
        message_event_grid_json,
        "json",
    )
    result += layer(2, "Event Types (EventType)")

    event_map = {get_event_type(x).name: x for x in examples}

    # table of contents linking to each event section
    for name in sorted(event_map.keys()):
        result += f"* [{name}](#{name})\n"
    result += "\n"

    # one section per event type: example payload plus JSON schema
    for name in sorted(event_map.keys()):
        example = event_map[name]
        result += layer(3, name)
        result += typed(
            4,
            "Example",
            example.json(indent=4, exclude_none=True, sort_keys=True),
            "json",
        )
        result += typed(4, "Schema",
                        example.schema_json(indent=4, sort_keys=True), "json")

    result += typed(2, "Full Event Schema",
                    message.schema_json(indent=4, sort_keys=True), "json")

    # newline="\n" keeps the output identical across platforms
    with open(filename, "w", newline="\n", encoding="utf8") as handle:
        handle.write(result)