# Example 1
    def test_scaleset_size(self) -> None:
        """Scaleset size must reject negatives and store 0 and positive values."""

        def make_scaleset(size: int) -> Scaleset:
            # Everything except `size` is held constant across the cases.
            return Scaleset(
                pool_name=PoolName("test-pool"),
                vm_sku="Standard_D2ds_v4",
                image="Canonical:UbuntuServer:18.04-LTS:latest",
                region=Region("westus2"),
                size=size,
                spot_instances=False,
            )

        # A negative size is rejected at construction time.
        with self.assertRaises(ValueError):
            make_scaleset(-1)

        # Zero and positive sizes are accepted and stored unchanged.
        for valid_size in (0, 80):
            self.assertEqual(make_scaleset(valid_size).size, valid_size)
# Example 2
    def test_get_vm_count(self, mock_get_pool: MagicMock) -> None:
        """get_vm_count reports the TaskPool count; an empty list yields 0."""
        # No tasks -> no VMs requested.
        self.assertEqual(get_vm_count([]), 0)

        shared_id = UUID("6b049d51-23e9-4f5c-a5af-ff1f73d0d9e9")
        config = TaskConfig(
            job_id=shared_id,
            containers=[
                TaskContainers(
                    type=ContainerType.inputs, name=Container("test-container")
                )
            ],
            tags={},
            task=TaskDetails(
                type=TaskType.libfuzzer_fuzz,
                duration=12,
                target_exe="fuzz.exe",
                target_env={},
                target_options=[],
            ),
            # count=2 is the value get_vm_count should surface for this task.
            pool=TaskPool(count=2, pool_name=PoolName("test-pool")),
        )
        task = Task(job_id=shared_id, os=OS.linux, config=config)

        # The pool lookup is mocked so no live backend is required.
        mock_get_pool.return_value = Pool(
            name=PoolName("test-pool"),
            pool_id=shared_id,
            os=OS.linux,
            managed=False,
            arch=Architecture.x86_64,
        )
        self.assertEqual(get_vm_count([task]), 2)
# Example 3
 def build_tasks(self, size: int) -> List[Task]:
     """Return `size` identical colocated libfuzzer tasks for bucketing tests."""

     def _make_task() -> Task:
         # Every task shares job id 0, pool "pool", and one setup container.
         return Task(
             job_id=UUID(int=0),
             config=TaskConfig(
                 job_id=UUID(int=0),
                 task=TaskDetails(
                     type=TaskType.libfuzzer_fuzz,
                     duration=1,
                     target_exe="fuzz.exe",
                     target_env={},
                     target_options=[],
                 ),
                 pool=TaskPool(pool_name=PoolName("pool"), count=1),
                 containers=[
                     TaskContainers(
                         type=ContainerType.setup, name=Container("setup")
                     )
                 ],
                 tags={},
                 colocate=True,
             ),
             os=OS.linux,
         )

     return [_make_task() for _ in range(size)]
# Example 4
 def test_autoscale_pool(
     self, mock_get_tasks_by_pool_name: MagicMock
 ) -> None:
     """Autoscaling an unmanaged pool must not look up any tasks."""
     unmanaged_pool = Pool(
         name=PoolName("test-pool"),
         pool_id=UUID("6b049d51-23e9-4f5c-a5af-ff1f73d0d9e9"),
         os=OS.linux,
         managed=False,
         arch=Architecture.x86_64,
     )
     autoscale_pool(pool=unmanaged_pool)
     # managed=False: autoscale_pool should return before querying tasks.
     mock_get_tasks_by_pool_name.assert_not_called()
# Example 5
 def setup(
     self,
     *,
     region: Optional[Region] = None,
     pool_size: int,
     os_list: List[OS],
 ) -> None:
     """Mark the log start, then create one pool plus scaleset per OS."""
     self.inject_log(self.start_log_marker)
     for target_os in os_list:
         # Pool names embed the OS and test id to stay unique per run.
         pool_name = PoolName(f"testpool-{target_os.name}-{self.test_id}")
         self.logger.info("creating pool: %s:%s", target_os.name, pool_name)
         self.pools[target_os] = self.of.pools.create(pool_name, target_os)
         self.logger.info("creating scaleset for pool: %s", pool_name)
         self.of.scalesets.create(pool_name, pool_size, region=region)
# Example 6
    def test_user_info_filter(self) -> None:
        """Scrubbing an EventTaskCreated must drop user_info and nothing else."""
        job_id = uuid4()
        task_id = uuid4()

        # Identity payload that the filter is expected to strip out.
        user_info = UserInfo(
            application_id=uuid4(),
            object_id=uuid4(),
            upn="*****@*****.**",
        )

        task_config = TaskConfig(
            job_id=job_id,
            containers=[
                TaskContainers(
                    type=ContainerType.inputs, name=Container("test-container")
                )
            ],
            tags={},
            task=TaskDetails(
                type=TaskType.libfuzzer_fuzz,
                duration=12,
                target_exe="fuzz.exe",
                target_env={},
                target_options=[],
            ),
            pool=TaskPool(count=2, pool_name=PoolName("test-pool")),
        )

        # Two identical events, except the control never had user_info set.
        with_user_info = EventTaskCreated(
            job_id=job_id,
            task_id=task_id,
            config=task_config,
            user_info=user_info,
        )
        without_user_info = EventTaskCreated(
            job_id=job_id,
            task_id=task_id,
            config=task_config,
            user_info=None,
        )

        scrubbed = filter_event(with_user_info, get_event_type(with_user_info))
        self.assertEqual(scrubbed, without_user_info)
# Example 7
 def setup(
     self,
     *,
     region: Optional[Region] = None,
     user_pools: Optional[Dict[str, str]] = None,
 ) -> None:
     """Attach user-supplied pools when given, otherwise create pool + scaleset."""
     for target_os in self.os:
         # A user-provided mapping overrides pool creation for that OS.
         existing = user_pools.get(target_os.name) if user_pools else None
         if existing is not None:
             self.logger.info(
                 "using existing pool: %s:%s", target_os.name, existing
             )
             self.pools[target_os] = self.of.pools.get(existing)
         else:
             name = PoolName("pool-%s-%s" % (self.project, target_os.name))
             self.logger.info("creating pool: %s:%s", target_os.name, name)
             self.pools[target_os] = self.of.pools.create(name, target_os)
             self.logger.info("creating scaleset for pool: %s", name)
             self.of.scalesets.create(name, self.pool_size, region=region)
# Example 8
    def test_many_buckets(self) -> None:
        """100 tasks perturbed on four independent axes must yield 12 buckets."""
        tasks = self.build_tasks(100)
        shared_job = UUID(int=1)
        for index, task in enumerate(tasks):
            # Each branch below varies one field that feeds the bucket key.
            if index % 2 == 0:
                task.job_id = shared_job
                task.config.job_id = shared_job
            if index % 3 == 0:
                task.os = OS.windows
            if index % 4 == 0:
                task.config.containers[0].name = Container("setup2")
            if index % 5 == 0 and task.config.pool:
                task.config.pool.pool_name = PoolName("alternate-pool")

        self.check_buckets(bucket_tasks(tasks), tasks, bucket_count=12)
# Example 9
    def _run(
        self, target_os: OS, test_id: UUID, base: Directory, target: str
    ) -> None:
        """Exercise regression detection: broken build, fixed build, broken again."""
        pool = PoolName(f"{target}-{target_os.name}-{test_id}")
        self.onefuzz.pools.create(pool, target_os)
        self.onefuzz.scalesets.create(pool, 5)

        broken = File(os.path.join(base, target, "broken.exe"))
        fixed = File(os.path.join(base, target, "fixed.exe"))

        self.logger.info("starting first build")
        self._run_job(test_id, pool, target, broken, 1)

        # The fixed binary must NOT reproduce the crash.
        self.logger.info("starting second build")
        if self._check_regression(self._run_job(test_id, pool, target, fixed, 2)):
            raise Exception("fixed binary should be a no repro")

        # Re-introducing the broken binary must be reported as a regression.
        self.logger.info("starting third build")
        if not self._check_regression(self._run_job(test_id, pool, target, broken, 3)):
            raise Exception("broken binary should be a crash report")

        self.onefuzz.pools.shutdown(pool, now=True)
# Example 10
 os=OS.linux,
 job=JobConfig(project="", name=Container(""), build="", duration=1),
 tasks=[
     TaskConfig(
         job_id=(UUID(int=0)),
         task=TaskDetails(
             type=TaskType.generic_supervisor,
             duration=1,
             target_exe="fuzz.exe",
             target_env={},
             target_options=[],
             supervisor_exe="",
             supervisor_options=[],
             supervisor_input_marker="@@",
         ),
         pool=TaskPool(count=1, pool_name=PoolName("")),
         containers=[
             TaskContainers(
                 name=Container("afl-container-name"), type=ContainerType.tools
             ),
             TaskContainers(name=Container(""), type=ContainerType.setup),
             TaskContainers(name=Container(""), type=ContainerType.crashes),
             TaskContainers(name=Container(""), type=ContainerType.inputs),
         ],
         tags={},
     ),
     TaskConfig(
         job_id=UUID(int=0),
         prereq_tasks=[UUID(int=0)],
         task=TaskDetails(
             type=TaskType.generic_crash_report,
# Example 11
def main() -> None:
    """Generate the webhook-events markdown reference and write it to a file.

    Usage: script.py OUTPUT_FILE. Builds one example instance of every event
    type, asserts the example set is complete, then renders examples and JSON
    schemas into a single markdown document written to OUTPUT_FILE.
    """
    if len(sys.argv) < 2:
        print(f"usage: {__file__} [OUTPUT_FILE]")
        sys.exit(1)
    filename = sys.argv[1]

    # Shared example payloads reused by several of the events below.
    task_config = TaskConfig(
        job_id=UUID(int=0),
        task=TaskDetails(
            type=TaskType.libfuzzer_fuzz,
            duration=1,
            target_exe="fuzz.exe",
            target_env={},
            target_options=[],
        ),
        containers=[
            TaskContainers(name=Container("my-setup"),
                           type=ContainerType.setup),
            TaskContainers(name=Container("my-inputs"),
                           type=ContainerType.inputs),
            TaskContainers(name=Container("my-crashes"),
                           type=ContainerType.crashes),
        ],
        tags={},
    )
    report = Report(
        input_blob=BlobRef(
            account="contoso-storage-account",
            container=Container("crashes"),
            name="input.txt",
        ),
        executable="fuzz.exe",
        crash_type="example crash report type",
        crash_site="example crash site",
        call_stack=["#0 line", "#1 line", "#2 line"],
        call_stack_sha256=ZERO_SHA256,
        input_sha256=EMPTY_SHA256,
        asan_log="example asan log",
        task_id=UUID(int=0),
        job_id=UUID(int=0),
        scariness_score=10,
        scariness_description="example-scariness",
        tool_name="libfuzzer",
        tool_version="1.2.3",
        onefuzz_version="1.2.3",
    )
    # One example instance per event type; completeness is asserted below.
    examples: List[Event] = [
        EventPing(ping_id=UUID(int=0)),
        EventTaskCreated(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            config=task_config,
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
        ),
        EventTaskStopped(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
            config=task_config,
        ),
        EventTaskFailed(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            error=Error(code=ErrorCode.TASK_FAILED,
                        errors=["example error message"]),
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
            config=task_config,
        ),
        EventTaskStateUpdated(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            state=TaskState.init,
            config=task_config,
        ),
        EventProxyCreated(region=Region("eastus"), proxy_id=UUID(int=0)),
        EventProxyDeleted(region=Region("eastus"), proxy_id=UUID(int=0)),
        EventProxyFailed(
            region=Region("eastus"),
            proxy_id=UUID(int=0),
            error=Error(code=ErrorCode.PROXY_FAILED,
                        errors=["example error message"]),
        ),
        EventProxyStateUpdated(
            region=Region("eastus"),
            proxy_id=UUID(int=0),
            state=VmState.init,
        ),
        EventPoolCreated(
            pool_name=PoolName("example"),
            os=OS.linux,
            arch=Architecture.x86_64,
            managed=True,
        ),
        EventPoolDeleted(pool_name=PoolName("example")),
        EventScalesetCreated(
            scaleset_id=UUID(int=0),
            pool_name=PoolName("example"),
            vm_sku="Standard_D2s_v3",
            image="Canonical:UbuntuServer:18.04-LTS:latest",
            region=Region("eastus"),
            size=10,
        ),
        EventScalesetFailed(
            scaleset_id=UUID(int=0),
            pool_name=PoolName("example"),
            error=Error(code=ErrorCode.UNABLE_TO_RESIZE,
                        errors=["example error message"]),
        ),
        EventScalesetDeleted(scaleset_id=UUID(int=0),
                             pool_name=PoolName("example")),
        EventScalesetStateUpdated(
            scaleset_id=UUID(int=0),
            pool_name=PoolName("example"),
            state=ScalesetState.init,
        ),
        EventScalesetResizeScheduled(scaleset_id=UUID(int=0),
                                     pool_name=PoolName("example"),
                                     size=0),
        EventJobCreated(
            job_id=UUID(int=0),
            config=JobConfig(
                project="example project",
                name="example name",
                build="build 1",
                duration=24,
            ),
        ),
        EventJobStopped(
            job_id=UUID(int=0),
            config=JobConfig(
                project="example project",
                name="example name",
                build="build 1",
                duration=24,
            ),
            task_info=[
                JobTaskStopped(
                    task_id=UUID(int=0),
                    task_type=TaskType.libfuzzer_fuzz,
                    error=Error(code=ErrorCode.TASK_FAILED,
                                errors=["example error message"]),
                ),
                JobTaskStopped(
                    task_id=UUID(int=1),
                    task_type=TaskType.coverage,
                ),
            ],
        ),
        EventNodeCreated(machine_id=UUID(int=0),
                         pool_name=PoolName("example")),
        EventNodeDeleted(machine_id=UUID(int=0),
                         pool_name=PoolName("example")),
        EventNodeStateUpdated(
            machine_id=UUID(int=0),
            pool_name=PoolName("example"),
            state=NodeState.setting_up,
        ),
        EventRegressionReported(
            regression_report=RegressionReport(
                crash_test_result=CrashTestResult(crash_report=report),
                original_crash_test_result=CrashTestResult(
                    crash_report=report),
            ),
            container=Container("container-name"),
            filename="example.json",
        ),
        EventCrashReported(
            container=Container("container-name"),
            filename="example.json",
            report=report,
        ),
        EventFileAdded(container=Container("container-name"),
                       filename="example.txt"),
        EventNodeHeartbeat(machine_id=UUID(int=0),
                           pool_name=PoolName("example")),
        EventTaskHeartbeat(task_id=UUID(int=0),
                           job_id=UUID(int=0),
                           config=task_config),
        EventInstanceConfigUpdated(config=InstanceConfig(
            admins=[UUID(int=0)], allowed_aad_tenants=[UUID(int=0)])),
    ]

    # Completeness check 1: every member of the Event union has an example.
    # works around `mypy` not handling that Union has `__args__`
    for event in getattr(Event, "__args__", []):
        seen = False
        for value in examples:
            if isinstance(value, event):
                seen = True
                break
        assert seen, "missing event type definition: %s" % event.__name__

    event_types = [get_event_type(x) for x in examples]

    # Completeness check 2: every EventType enum value appears in the examples.
    for event_type in EventType:
        assert event_type in event_types, (
            "missing event type definition: %s" % event_type.name)

    # Example webhook envelope rendered in the "Payload" section.
    message = WebhookMessage(
        webhook_id=UUID(int=0),
        event_id=UUID(int=0),
        event_type=EventType.ping,
        event=EventPing(ping_id=UUID(int=0)),
        instance_id=UUID(int=0),
        instance_name="example",
    )

    # Same event wrapped in the Azure Event Grid envelope format.
    message_event_grid = WebhookMessageEventGrid(
        dataVersion="1.0.0",
        subject="example",
        eventType=EventType.ping,
        eventTime=datetime.datetime.min,
        id=UUID(int=0),
        data=EventPing(ping_id=UUID(int=0)),
    )

    # Round-trip through json.loads so the list wrapper renders consistently.
    message_event_grid_json = json.dumps(
        [
            json.loads(
                message_event_grid.json(
                    indent=4, exclude_none=True, sort_keys=True))
        ],
        indent=4,
        sort_keys=True,
    )

    # Assemble the markdown document: intro, payload formats, then per-event
    # examples and schemas (layer/typed emit markdown headers / code fences).
    result = ""
    result += layer(
        1,
        "Webhook Events",
        "This document describes the basic webhook event subscriptions "
        "available in OneFuzz",
    )
    result += layer(
        2,
        "Payload",
        "Each event will be submitted via HTTP POST to the user provided URL.",
    )

    result += typed(
        3,
        "Example",
        message.json(indent=4, exclude_none=True, sort_keys=True),
        "json",
    )

    result += layer(
        2,
        "Event Grid Payload format",
        "If webhook is set to have Event Grid message format then "
        "the payload will look as follows:",
    )

    result += typed(
        3,
        "Example",
        message_event_grid_json,
        "json",
    )

    result += layer(2, "Event Types (EventType)")

    event_map = {get_event_type(x).name: x for x in examples}

    # Table of contents: one anchor link per event type, sorted by name.
    for name in sorted(event_map.keys()):
        result += f"* [{name}](#{name})\n"

    result += "\n"

    # One section per event type: rendered example followed by its schema.
    for name in sorted(event_map.keys()):
        example = event_map[name]
        result += layer(3, name)
        result += typed(
            4,
            "Example",
            example.json(indent=4, exclude_none=True, sort_keys=True),
            "json",
        )
        result += typed(4, "Schema",
                        example.schema_json(indent=4, sort_keys=True), "json")

    result += typed(2, "Full Event Schema",
                    message.schema_json(indent=4, sort_keys=True), "json")

    # newline="\n" pins LF line endings regardless of platform.
    with open(filename, "w", newline="\n", encoding="utf8") as handle:
        handle.write(result)
# Example 12
def main() -> None:
    """Print the webhook-events markdown reference to stdout.

    Builds one example instance of every event type, asserts the example set
    is complete, then emits markdown sections (via `layer`/`typed`) with an
    example payload and JSON schema for each event.
    """
    # Shared task configuration reused by several of the events below.
    task_config = TaskConfig(
        job_id=UUID(int=0),
        task=TaskDetails(
            type=TaskType.libfuzzer_fuzz,
            duration=1,
            target_exe="fuzz.exe",
            target_env={},
            target_options=[],
        ),
        containers=[
            TaskContainers(name=Container("my-setup"), type=ContainerType.setup),
            TaskContainers(name=Container("my-inputs"), type=ContainerType.inputs),
            TaskContainers(name=Container("my-crashes"), type=ContainerType.crashes),
        ],
        tags={},
    )
    # One example instance per event type; completeness is asserted below.
    examples: List[Event] = [
        EventPing(ping_id=UUID(int=0)),
        EventTaskCreated(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            config=task_config,
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
        ),
        EventTaskStopped(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
            config=task_config,
        ),
        EventTaskFailed(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            error=Error(code=ErrorCode.TASK_FAILED, errors=["example error message"]),
            user_info=UserInfo(
                application_id=UUID(int=0),
                object_id=UUID(int=0),
                upn="*****@*****.**",
            ),
            config=task_config,
        ),
        EventTaskStateUpdated(
            job_id=UUID(int=0),
            task_id=UUID(int=0),
            state=TaskState.init,
            config=task_config,
        ),
        EventProxyCreated(region=Region("eastus")),
        EventProxyDeleted(region=Region("eastus")),
        EventProxyFailed(
            region=Region("eastus"),
            error=Error(code=ErrorCode.PROXY_FAILED, errors=["example error message"]),
        ),
        EventPoolCreated(
            pool_name=PoolName("example"),
            os=OS.linux,
            arch=Architecture.x86_64,
            managed=True,
        ),
        EventPoolDeleted(pool_name=PoolName("example")),
        EventScalesetCreated(
            scaleset_id=UUID(int=0),
            pool_name=PoolName("example"),
            vm_sku="Standard_D2s_v3",
            image="Canonical:UbuntuServer:18.04-LTS:latest",
            region=Region("eastus"),
            size=10,
        ),
        EventScalesetFailed(
            scaleset_id=UUID(int=0),
            pool_name=PoolName("example"),
            error=Error(
                code=ErrorCode.UNABLE_TO_RESIZE, errors=["example error message"]
            ),
        ),
        EventScalesetDeleted(scaleset_id=UUID(int=0), pool_name=PoolName("example")),
        EventJobCreated(
            job_id=UUID(int=0),
            config=JobConfig(
                project="example project",
                name="example name",
                build="build 1",
                duration=24,
            ),
        ),
        EventJobStopped(
            job_id=UUID(int=0),
            config=JobConfig(
                project="example project",
                name="example name",
                build="build 1",
                duration=24,
            ),
            task_info=[
                JobTaskStopped(
                    task_id=UUID(int=0),
                    task_type=TaskType.libfuzzer_fuzz,
                    error=Error(
                        code=ErrorCode.TASK_FAILED, errors=["example error message"]
                    ),
                ),
                JobTaskStopped(
                    task_id=UUID(int=1),
                    task_type=TaskType.libfuzzer_coverage,
                ),
            ],
        ),
        EventNodeCreated(machine_id=UUID(int=0), pool_name=PoolName("example")),
        EventNodeDeleted(machine_id=UUID(int=0), pool_name=PoolName("example")),
        EventNodeStateUpdated(
            machine_id=UUID(int=0),
            pool_name=PoolName("example"),
            state=NodeState.setting_up,
        ),
        EventCrashReported(
            container=Container("container-name"),
            filename="example.json",
            report=Report(
                input_blob=BlobRef(
                    account="contoso-storage-account",
                    container=Container("crashes"),
                    name="input.txt",
                ),
                executable="fuzz.exe",
                crash_type="example crash report type",
                crash_site="example crash site",
                call_stack=["#0 line", "#1 line", "#2 line"],
                call_stack_sha256=ZERO_SHA256,
                input_sha256=EMPTY_SHA256,
                asan_log="example asan log",
                task_id=UUID(int=0),
                job_id=UUID(int=0),
                scariness_score=10,
                scariness_description="example-scariness",
            ),
        ),
        EventFileAdded(container=Container("container-name"), filename="example.txt"),
        EventNodeHeartbeat(machine_id=UUID(int=0), pool_name=PoolName("example")),
        EventTaskHeartbeat(task_id=UUID(int=0), job_id=UUID(int=0), config=task_config),
    ]

    # Completeness check 1: every member of the Event union has an example.
    # works around `mypy` not handling that Union has `__args__`
    for event in getattr(Event, "__args__", []):
        seen = False
        for value in examples:
            if isinstance(value, event):
                seen = True
                break
        assert seen, "missing event type definition: %s" % event.__name__

    event_types = [get_event_type(x) for x in examples]

    # Completeness check 2: every EventType enum value appears in the examples.
    for event_type in EventType:
        assert event_type in event_types, (
            "missing event type definition: %s" % event_type.name
        )

    # Example webhook envelope rendered in the "Payload" section.
    message = WebhookMessage(
        webhook_id=UUID(int=0),
        event_id=UUID(int=0),
        event_type=EventType.ping,
        event=EventPing(ping_id=UUID(int=0)),
        instance_id=UUID(int=0),
        instance_name="example",
    )

    # Emit the document: intro, payload example, then per-event sections
    # (layer/typed print markdown headers and fenced code blocks).
    layer(
        1,
        "Webhook Events",
        "This document describes the basic webhook event subscriptions "
        "available in OneFuzz",
    )
    layer(
        2,
        "Payload",
        "Each event will be submitted via HTTP POST to the user provided URL.",
    )

    typed(
        3, "Example", message.json(indent=4, exclude_none=True, sort_keys=True), "json"
    )
    layer(2, "Event Types (EventType)")

    event_map = {get_event_type(x).name: x for x in examples}

    # Table of contents: one anchor link per event type, sorted by name.
    for name in sorted(event_map.keys()):
        print(f"* [{name}](#{name})")

    print()

    # One section per event type: rendered example followed by its schema.
    for name in sorted(event_map.keys()):
        example = event_map[name]
        layer(3, name)
        typed(
            4,
            "Example",
            example.json(indent=4, exclude_none=True, sort_keys=True),
            "json",
        )
        typed(4, "Schema", example.schema_json(indent=4, sort_keys=True), "json")

    typed(2, "Full Event Schema", message.schema_json(indent=4, sort_keys=True), "json")