def build_tasks(self, parent_task_id, env=None):
    """Create fuzzing tasks and attach them to a decision task"""
    now = datetime.utcnow()
    expires = stringDate(fromNow("1 week", now))
    for pool in self.iterpools():
        deadline = stringDate(now + timedelta(seconds=pool.max_run_time))
        for index in range(1, pool.tasks + 1):
            rendered = FUZZING_TASK.substitute(
                created=stringDate(now),
                deadline=deadline,
                description=DESCRIPTION.replace("\n", "\\n"),
                expires=expires,
                max_run_time=pool.max_run_time,
                name=f"Fuzzing task {pool.platform}-{pool.pool_id} - {index}/{pool.tasks}",
                owner_email=OWNER_EMAIL,
                pool_id=pool.pool_id,
                provisioner=PROVISIONER_ID,
                scheduler=SCHEDULER_ID,
                secret=DECISION_TASK_SECRET,
                task_group=parent_task_id,
                task_id=self.task_id,
            )
            task = yaml.safe_load(rendered)
            task["payload"]["artifacts"].update(pool.artifact_map(expires))
            # `container` can be either a string or a dict, so can't template it
            task["payload"]["image"] = pool.container
            task["scopes"] = sorted(chain(pool.scopes, task["scopes"]))
            add_capabilities_for_scopes(task)
            if env is not None:
                # Caller-supplied variables must not clobber template ones.
                assert set(task["payload"]["env"]).isdisjoint(set(env))
                task["payload"]["env"].update(env)
            yield slugId(), task
def create_task(**kwargs):
    """Create a TC task definition.

    Mandatory keyword arguments: ``workerType``, ``provisionerId`` and
    ``metadata``.  Everything else falls back to a sensible default.

    NOTE: This code needs to be tested for normal TC tasks to determine if
    the default values would also work for non BBB tasks.

    :returns: dict with ``taskId``, ``reruns`` and the ``task`` definition.
    :rtype: dict
    """
    # Evaluate defaults lazily: the previous version called slugId()/fromNow()
    # even when the caller supplied the value.
    task_id = kwargs['taskId'] if 'taskId' in kwargs else slugId()
    deadline = kwargs['deadline'] if 'deadline' in kwargs else fromNow('1d')
    task_definition = {
        'taskId': task_id,
        # Do not retry the task if it fails to run successfully
        'reruns': kwargs.get('reruns', 0),
        'task': {
            'workerType': kwargs['workerType'],  # mandatory
            'provisionerId': kwargs['provisionerId'],  # mandatory
            'created': kwargs['created'] if 'created' in kwargs else fromNow('0d'),
            'deadline': deadline,
            # BUG FIX: this used to read the 'deadline' key, silently ignoring
            # an explicit 'expires' argument.  An explicit value now wins;
            # otherwise keep the old fallback (expire at the deadline).
            'expires': kwargs['expires'] if 'expires' in kwargs else deadline,
            'payload': kwargs.get('payload', {}),
            'metadata': kwargs['metadata'],  # mandatory
            'schedulerId': kwargs.get('schedulerId', 'task-graph-scheduler'),
            'tags': kwargs.get('tags', {}),
            'extra': kwargs.get('extra', {}),
            'routes': kwargs.get('routes', []),
            'priority': kwargs.get('priority', 'normal'),
            'retries': kwargs.get('retries', 5),
            'scopes': kwargs.get('scopes', []),
        }
    }
    if kwargs.get('taskGroupId'):
        # BUG FIX: a stray trailing comma used to turn this value into a
        # one-element tuple instead of a string.
        task_definition['task']['taskGroupId'] = kwargs['taskGroupId']
    return task_definition
def create_task(**kwargs):
    """Create a TC task definition.

    Mandatory keyword arguments: ``workerType``, ``provisionerId`` and
    ``metadata``.  Everything else falls back to a sensible default.

    NOTE: This code needs to be tested for normal TC tasks to determine if
    the default values would also work for non BBB tasks.

    :returns: dict with ``taskId``, ``reruns`` and the ``task`` definition.
    :rtype: dict
    """
    # Evaluate defaults lazily: the previous version called slugId()/fromNow()
    # even when the caller supplied the value.
    task_id = kwargs['taskId'] if 'taskId' in kwargs else slugId()
    deadline = kwargs['deadline'] if 'deadline' in kwargs else fromNow('1d')
    task_definition = {
        'taskId': task_id,
        # Do not retry the task if it fails to run successfully
        'reruns': kwargs.get('reruns', 0),
        'task': {
            'workerType': kwargs['workerType'],  # mandatory
            'provisionerId': kwargs['provisionerId'],  # mandatory
            'created': kwargs['created'] if 'created' in kwargs else fromNow('0d'),
            'deadline': deadline,
            # BUG FIX: this used to read the 'deadline' key, silently ignoring
            # an explicit 'expires' argument.  An explicit value now wins;
            # otherwise keep the old fallback (expire at the deadline).
            'expires': kwargs['expires'] if 'expires' in kwargs else deadline,
            'payload': kwargs.get('payload', {}),
            'metadata': kwargs['metadata'],  # mandatory
            'schedulerId': kwargs.get('schedulerId', 'task-graph-scheduler'),
            'tags': kwargs.get('tags', {}),
            'extra': kwargs.get('extra', {}),
            'routes': kwargs.get('routes', []),
            'priority': kwargs.get('priority', 'normal'),
            'retries': kwargs.get('retries', 5),
            'scopes': kwargs.get('scopes', []),
        }
    }
    if kwargs.get('taskGroupId'):
        # BUG FIX: a stray trailing comma used to turn this value into a
        # one-element tuple instead of a string.
        task_definition['task']['taskGroupId'] = kwargs['taskGroupId']
    return task_definition
def main():
    """CLI entry point: quarantine or re-enable a set of workers."""
    parser = ArgumentParser()
    parser.add_argument('--disable', action='store_true', dest='quarantine',
                        default=True,
                        help='disable the workers for 1000 years')
    parser.add_argument('--enable', action='store_false', dest='quarantine',
                        help='enable the workers')
    parser.add_argument('-p', '--provisioner', required=True)
    parser.add_argument('-w', '--worker-type', required=True)
    parser.add_argument('-g', '--worker-group', required=True)
    parser.add_argument('workers', nargs='+', help='worker ids')
    args = parser.parse_args()

    # Quarantining pushes the expiry ~forever into the future; enabling
    # sets it one hour in the past so it has already lapsed.
    if args.quarantine:
        until = fromNow('1000 years')
    else:
        until = fromNow('-1 hours')

    queue = Queue()
    for worker_id in args.workers:
        response = queue.quarantineWorker(
            args.provisioner,
            args.worker_type,
            args.worker_group,
            worker_id,
            payload={'quarantineUntil': until},
        )
        if 'quarantineUntil' in response:
            print('{0[workerId]} quarantined until {0[quarantineUntil]}'.format(response))
        else:
            print('{0[workerId]} not quarantined'.format(response))
def build_tasks(self, parent_task_id: str, env: Optional[Dict[str, str]] = None):
    """Create fuzzing tasks and attach them to a decision task"""
    now = datetime.utcnow()

    def render(run_time, task_name):
        # Fill in the YAML task template and parse it into a dict.
        return yaml.safe_load(
            FUZZING_TASK.substitute(
                created=stringDate(now),
                deadline=stringDate(now + timedelta(seconds=run_time)),
                description=DESCRIPTION.replace("\n", "\\n"),
                expires=stringDate(fromNow("4 weeks", now)),
                max_run_time=run_time,
                name=task_name,
                owner_email=OWNER_EMAIL,
                pool_id=self.pool_id,
                provisioner=PROVISIONER_ID,
                scheduler=SCHEDULER_ID,
                secret=DECISION_TASK_SECRET,
                task_group=parent_task_id,
                task_id=self.task_id,
            )
        )

    preprocess_task_id = None
    preprocess = cast(PoolConfiguration, self.create_preprocess())
    if preprocess is not None:
        assert preprocess.max_run_time is not None
        task = render(
            preprocess.max_run_time,
            f"Fuzzing task {self.task_id} - preprocess",
        )
        # Flag the task so the launcher runs the preprocess step.
        task["payload"]["env"]["TASKCLUSTER_FUZZING_PREPROCESS"] = "1"
        configure_task(task, preprocess, now, env)
        preprocess_task_id = slugId()
        yield preprocess_task_id, task

    assert self.max_run_time is not None
    assert self.tasks is not None
    for index in range(1, self.tasks + 1):
        task = render(
            self.max_run_time,
            f"Fuzzing task {self.task_id} - {index}/{self.tasks}",
        )
        if preprocess_task_id is not None:
            # Fuzzing tasks wait for the preprocess task to finish first.
            task["dependencies"].append(preprocess_task_id)
        configure_task(task, self, now, env)
        yield slugId(), task
def _create_task(buildername, repo_name, revision, metadata, requires=None):
    """Return a taskcluster task to trigger a buildbot builder.

    This function creates a generic task with the minimum amount of
    information required for the buildbot-bridge to consider it valid.
    You can establish a list dependencies to other tasks through the requires
    field.

    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param repo_name: The name of a repository e.g. mozilla-inbound, alder et al.
    :type repo_name: str
    :param revision: Changeset ID of a revision.
    :type revision: str
    :param metadata: Dictionary with metadata values about the task.
    :type metadata: dict
    :param requires: List of taskIds of other tasks which this task depends on.
    :type requires: list
    :returns: TaskCluster graph
    :rtype: dict
    """
    task = {
        'taskId': slugId(),
        # Do not retry the task if it fails to run successfully
        'reruns': 0,
        'task': {
            'workerType': 'buildbot-bridge',
            'provisionerId': 'buildbot-bridge',
            # XXX: check if tc client has something more like now
            'created': fromNow('0d'),
            'deadline': fromNow('1d'),
            'payload': {
                'buildername': buildername,
                'sourcestamp': {
                    'branch': repo_name,
                    'revision': revision
                },
                # Needed because of bug 1195751
                'properties': {
                    'product': get_builder_information(buildername)['properties']['product'],
                    'who': metadata['owner']
                }
            },
            # BUG FIX: ``metadata.items() + {...}.items()`` is Python-2-only;
            # dict_items objects cannot be concatenated with "+" on Python 3.
            # Merge via the dict constructor with a keyword override instead.
            'metadata': dict(metadata, name=buildername),
        }
    }
    if requires:
        task['requires'] = requires
    return task
def build_tasks(self, parent_task_id, env=None):
    """Create fuzzing tasks and attach them to a decision task"""
    now = datetime.utcnow()
    for index in range(1, self.tasks + 1):
        task_id = slugId()
        payload = {
            "artifacts": {
                "project/fuzzing/private/logs": {
                    "expires": stringDate(fromNow("1 month", now)),
                    "path": "/logs/",
                    "type": "directory",
                }
            },
            "cache": {},
            "capabilities": {},
            "env": {"TASKCLUSTER_FUZZING_POOL": self.filename},
            "features": {"taskclusterProxy": True},
            "image": self.container,
            "maxRunTime": self.cycle_time,
        }
        if env is not None:
            # Caller-supplied variables must not clobber the defaults.
            assert set(payload["env"]).isdisjoint(set(env))
            payload["env"].update(env)
        task = {
            "taskGroupId": parent_task_id,
            "dependencies": [parent_task_id],
            "created": stringDate(now),
            "deadline": stringDate(now + timedelta(seconds=self.cycle_time)),
            "expires": stringDate(fromNow("1 month", now)),
            "extra": {},
            "metadata": {
                "description": DESCRIPTION,
                "name": f"Fuzzing task {self.id} - {index}/{self.tasks}",
                "owner": OWNER_EMAIL,
                "source": "https://github.com/MozillaSecurity/fuzzing-tc",
            },
            "payload": payload,
            "priority": "high",
            "provisionerId": PROVISIONER_ID,
            "workerType": self.id,
            "retries": 1,
            "routes": [],
            "schedulerId": SCHEDULER_ID,
            "scopes": self.scopes,
            "tags": {},
        }
        yield task_id, task
def _create_task(buildername, repo_name, revision, metadata, requires=None):
    """Return a taskcluster task to trigger a buildbot builder.

    This function creates a generic task with the minimum amount of
    information required for the buildbot-bridge to consider it valid.
    You can establish a list dependencies to other tasks through the requires
    field.

    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param repo_name: The name of a repository e.g. mozilla-inbound, alder et al.
    :type repo_name: str
    :param revision: Changeset ID of a revision.
    :type revision: str
    :param metadata: Dictionary with metadata values about the task.
    :type metadata: dict
    :param requires: List of taskIds of other tasks which this task depends on.
    :type requires: list
    :returns: TaskCluster graph
    :rtype: dict
    """
    task = {
        'taskId': slugId(),
        # Do not retry the task if it fails to run successfully
        'reruns': 0,
        'task': {
            'workerType': 'buildbot-bridge',
            'provisionerId': 'buildbot-bridge',
            # XXX: check if tc client has something more like now
            'created': fromNow('0d'),
            'deadline': fromNow('1d'),
            'payload': {
                'buildername': buildername,
                'sourcestamp': {
                    'branch': repo_name,
                    'revision': revision
                },
                # Needed because of bug 1195751
                'properties': {
                    'product': get_builder_information(buildername)['properties']['product'],
                    'who': metadata['owner']
                }
            },
            # BUG FIX: ``metadata.items() + {...}.items()`` is Python-2-only;
            # dict_items objects cannot be concatenated with "+" on Python 3.
            # Merge via the dict constructor with a keyword override instead.
            'metadata': dict(metadata, name=buildername),
        }
    }
    if requires:
        task['requires'] = requires
    return task
def build_tasks(self, parent_task_id: str, env: Optional[Dict[str, str]] = None):
    """Create fuzzing tasks and attach them to a decision task"""
    now = datetime.utcnow()
    for pool in self.iterpools():
        assert pool.max_run_time is not None
        assert pool.tasks is not None
        # These are identical for every task in the pool, so compute once.
        deadline = stringDate(now + timedelta(seconds=pool.max_run_time))
        expires = stringDate(fromNow("4 weeks", now))
        for index in range(1, pool.tasks + 1):
            rendered = FUZZING_TASK.substitute(
                created=stringDate(now),
                deadline=deadline,
                description=DESCRIPTION.replace("\n", "\\n"),
                expires=expires,
                max_run_time=pool.max_run_time,
                name=f"Fuzzing task {pool.platform}-{pool.pool_id} - {index}/{pool.tasks}",
                owner_email=OWNER_EMAIL,
                pool_id=pool.pool_id,
                provisioner=PROVISIONER_ID,
                scheduler=SCHEDULER_ID,
                secret=DECISION_TASK_SECRET,
                task_group=parent_task_id,
                task_id=self.task_id,
            )
            task = yaml.safe_load(rendered)
            configure_task(task, cast(PoolConfiguration, pool), now, env)
            yield slugId(), task
def configure_task(
    task: Dict[str, Any],
    config: "PoolConfiguration",
    now: datetime,
    env: Optional[Dict[str, str]],
) -> None:
    """Apply pool-specific settings to a rendered task definition, in place.

    Adds the pool's artifacts, scopes, capabilities and container image, then
    applies platform-specific command/env adjustments, and finally merges any
    caller-supplied environment variables.

    :param task: parsed task definition to mutate
    :param config: pool configuration providing artifacts, scopes and platform
    :param now: reference timestamp used for artifact expiry
    :param env: extra environment variables to merge into the task payload
    """
    # Register the pool's artifacts; they expire 4 weeks from `now`.
    task["payload"]["artifacts"].update(
        config.artifact_map(stringDate(fromNow("4 weeks", now)))
    )
    # Merge pool scopes with the template's; sorting keeps output stable.
    task["scopes"] = sorted(chain(config.get_scopes(), task["scopes"]))
    add_capabilities_for_scopes(task)
    add_task_image(task, config)
    if config.platform == "windows":
        task["payload"]["env"]["MSYSTEM"] = "MINGW64"
        task["payload"]["command"] = [
            "set HOME=%CD%",
            "set ARTIFACTS=%CD%",
            "set PATH="
            + ";".join(
                [
                    r"%CD%\msys64\opt\python",
                    r"%CD%\msys64\opt\python\Scripts",
                    r"%CD%\msys64\MINGW64\bin",
                    r"%CD%\msys64\usr\bin",
                    "%PATH%",
                ]
            ),
            "fuzzing-pool-launch",
        ]
        if config.run_as_admin:
            task["payload"].setdefault("osGroups", [])
            task["payload"]["osGroups"].append("Administrators")
            task["payload"]["features"]["runAsAdministrator"] = True
    elif config.platform == "macos":
        task["payload"]["command"] = [
            [
                "/bin/bash",
                "-c",
                "-x",
                'eval "$(homebrew/bin/brew shellenv)" && exec fuzzing-pool-launch',
            ],
        ]
    if config.platform in {"macos", "windows"}:
        # translate artifacts from dict to array for generic-worker
        task["payload"]["artifacts"] = [
            # `... or artifact` because dict.update returns None
            artifact.update({"name": name}) or artifact
            for name, artifact in task["payload"]["artifacts"].items()
        ]
    if env is not None:
        # Caller env must not shadow variables already set by the pool.
        assert set(task["payload"]["env"]).isdisjoint(set(env))
        task["payload"]["env"].update(env)
def task(self):
    """Task definition"""
    now = datetime.utcnow()

    # The parent decision task is always a dependency; an optional extra
    # dependency may be appended.
    dependencies = [self.parent_id]
    if self.dependency is not None:
        dependencies.append(self.dependency)

    payload = {
        "artifacts": {
            "project/fuzzing/bugmon": {
                "path": "/bugmon-artifacts/",
                "type": "directory",
            }
        },
        "cache": {},
        "capabilities": {
            "devices": {"hostSharedMemory": True, "loopbackAudio": True}
        },
        "env": self.env,
        "features": {"taskclusterProxy": True},
        "image": "mozillasecurity/bugmon:latest",
        "maxRunTime": MAX_RUNTIME,
    }

    return {
        "taskGroupId": self.parent_id,
        "dependencies": dependencies,
        "created": stringDate(now),
        "deadline": stringDate(now + timedelta(seconds=MAX_RUNTIME)),
        "expires": stringDate(fromNow("1 week", now)),
        "provisionerId": "proj-fuzzing",
        "metadata": {
            "description": "",
            "name": f"{self.TASK_NAME} ({self.bug_id})",
            "owner": "*****@*****.**",
            "source": "https://github.com/MozillaSecurity/bugmon",
        },
        "payload": payload,
        "priority": "high",
        "workerType": self.WORKER_TYPE,
        "retries": 5,
        "routes": ["[email protected]"],
        "schedulerId": "-",
        "scopes": self.scopes,
        "tags": {},
    }
def create_task(repo_name, revision, **kwargs):
    """Create a TC task definition for a repository revision.

    Mandatory keyword arguments: ``workerType`` and ``provisionerId``.
    ``metadata`` is queried from the repository when not supplied.

    NOTE: This code needs to be tested for normal TC tasks to determine if
    the default values would also work for non BBB tasks.

    :returns: dict with ``taskId``, ``reruns`` and the ``task`` definition.
    :rtype: dict
    """
    # BUG FIX: metadata used to be queried unconditionally, even when the
    # caller provided it explicitly.  Only hit the repository when needed.
    if "metadata" in kwargs:
        metadata = kwargs["metadata"]
    else:
        metadata = _query_metadata(
            repo_name, revision, name=kwargs.get("metadata_name")
        )
    # Evaluate defaults lazily instead of calling slugId()/fromNow() eagerly.
    task_id = kwargs["taskId"] if "taskId" in kwargs else slugId()
    deadline = kwargs["deadline"] if "deadline" in kwargs else fromNow("1d")
    task_definition = {
        "taskId": task_id,
        # Do not retry the task if it fails to run successfully
        "reruns": kwargs.get("reruns", 0),
        "task": {
            "workerType": kwargs["workerType"],  # mandatory
            "provisionerId": kwargs["provisionerId"],  # mandatory
            "created": kwargs["created"] if "created" in kwargs else fromNow("0d"),
            "deadline": deadline,
            # BUG FIX: this used to read the 'deadline' key, silently ignoring
            # an explicit 'expires' argument.  An explicit value now wins;
            # otherwise keep the old fallback (expire at the deadline).
            "expires": kwargs["expires"] if "expires" in kwargs else deadline,
            "payload": kwargs.get("payload", {}),
            "metadata": metadata,
            "schedulerId": kwargs.get("schedulerId", "task-graph-scheduler"),
            "tags": kwargs.get("tags", {}),
            "extra": kwargs.get("extra", {}),
            "routes": kwargs.get("routes", []),
            "priority": kwargs.get("priority", "normal"),
            "retries": kwargs.get("retries", 5),
            "scopes": kwargs.get("scopes", []),
        },
    }
    if kwargs.get("taskGroupId"):
        # BUG FIX: the value used to be wrapped in a one-element tuple.
        task_definition["task"]["taskGroupId"] = kwargs["taskGroupId"]
    return task_definition
def test_examples():
    """Verify fromNow against the recorded example expressions."""
    for case in examples:
        start = dateutil.parser.parse(case['from'])
        expected = dateutil.parser.parse(case['result'])
        assert subject.fromNow(case['expr'], start) == expected
def test_examples(self):
    """Verify fromNow against the recorded example expressions."""
    for case in self.examples:
        start = dateutil.parser.parse(case['from'])
        expected = dateutil.parser.parse(case['result'])
        self.assertEqual(subject.fromNow(case['expr'], start), expected)
def build_tasks(self, parent_task_id, env=None):
    """Create fuzzing tasks and attach them to a decision task"""
    now = datetime.utcnow()
    deps = [parent_task_id]
    # All tasks in this batch share the same expiry, one week out.
    expires = stringDate(fromNow("1 week", now))

    preprocess = self.create_preprocess()
    if preprocess is not None:
        task_id = slugId()
        task = {
            "taskGroupId": parent_task_id,
            "dependencies": [parent_task_id],
            "created": stringDate(now),
            "deadline": stringDate(
                now + timedelta(seconds=preprocess.max_run_time)
            ),
            "expires": expires,
            "extra": {},
            "metadata": {
                "description": DESCRIPTION,
                "name": f"Fuzzing task {self.task_id} - preprocess",
                "owner": OWNER_EMAIL,
                "source": "https://github.com/MozillaSecurity/fuzzing-tc",
            },
            "payload": {
                "artifacts": preprocess.artifact_map(expires),
                "cache": {},
                "capabilities": {},
                "env": {
                    "TASKCLUSTER_FUZZING_POOL": self.pool_id,
                    "TASKCLUSTER_SECRET": DECISION_TASK_SECRET,
                    "TASKCLUSTER_FUZZING_PREPROCESS": "1",
                },
                "features": {"taskclusterProxy": True},
                "image": preprocess.container,
                "maxRunTime": preprocess.max_run_time,
            },
            "priority": "high",
            "provisionerId": PROVISIONER_ID,
            "workerType": self.task_id,
            "retries": 5,
            "routes": [],
            "schedulerId": SCHEDULER_ID,
            "scopes": preprocess.scopes + [f"secrets:get:{DECISION_TASK_SECRET}"],
            "tags": {},
        }
        add_capabilities_for_scopes(task)
        if env is not None:
            # Caller-supplied variables must not clobber the defaults.
            assert set(task["payload"]["env"]).isdisjoint(set(env))
            task["payload"]["env"].update(env)
        # Every fuzzing task below depends on the preprocess task.
        deps.append(task_id)
        yield task_id, task

    for index in range(1, self.tasks + 1):
        task_id = slugId()
        task = {
            "taskGroupId": parent_task_id,
            "dependencies": deps,
            "created": stringDate(now),
            "deadline": stringDate(now + timedelta(seconds=self.max_run_time)),
            "expires": expires,
            "extra": {},
            "metadata": {
                "description": DESCRIPTION,
                "name": f"Fuzzing task {self.task_id} - {index}/{self.tasks}",
                "owner": OWNER_EMAIL,
                "source": "https://github.com/MozillaSecurity/fuzzing-tc",
            },
            "payload": {
                "artifacts": self.artifact_map(expires),
                "cache": {},
                "capabilities": {},
                "env": {
                    "TASKCLUSTER_FUZZING_POOL": self.pool_id,
                    "TASKCLUSTER_SECRET": DECISION_TASK_SECRET,
                },
                "features": {"taskclusterProxy": True},
                "image": self.container,
                "maxRunTime": self.max_run_time,
            },
            "priority": "high",
            "provisionerId": PROVISIONER_ID,
            "workerType": self.task_id,
            "retries": 5,
            "routes": [],
            "schedulerId": SCHEDULER_ID,
            "scopes": self.scopes + [f"secrets:get:{DECISION_TASK_SECRET}"],
            "tags": {},
        }
        add_capabilities_for_scopes(task)
        if env is not None:
            # Caller-supplied variables must not clobber the defaults.
            assert set(task["payload"]["env"]).isdisjoint(set(env))
            task["payload"]["env"].update(env)
        yield task_id, task
def build_tasks(self, parent_task_id, env=None):
    """Create fuzzing tasks and attach them to a decision task"""
    now = datetime.utcnow()
    expires = stringDate(fromNow("1 week", now))

    def render(run_time, task_name):
        # Fill in the YAML task template and parse it into a dict.
        return yaml.safe_load(
            FUZZING_TASK.substitute(
                created=stringDate(now),
                deadline=stringDate(now + timedelta(seconds=run_time)),
                description=DESCRIPTION.replace("\n", "\\n"),
                expires=expires,
                max_run_time=run_time,
                name=task_name,
                owner_email=OWNER_EMAIL,
                pool_id=self.pool_id,
                provisioner=PROVISIONER_ID,
                scheduler=SCHEDULER_ID,
                secret=DECISION_TASK_SECRET,
                task_group=parent_task_id,
                task_id=self.task_id,
            )
        )

    def finalize(task, config):
        # Attach artifacts, image, scopes and capabilities from the pool
        # config, then merge caller-supplied environment variables.
        task["payload"]["artifacts"].update(config.artifact_map(expires))
        # `container` can be either a string or a dict, so can't template it
        task["payload"]["image"] = config.container
        task["scopes"] = sorted(chain(config.scopes, task["scopes"]))
        add_capabilities_for_scopes(task)
        if env is not None:
            assert set(task["payload"]["env"]).isdisjoint(set(env))
            task["payload"]["env"].update(env)

    preprocess_task_id = None
    preprocess = self.create_preprocess()
    if preprocess is not None:
        task = render(
            preprocess.max_run_time,
            f"Fuzzing task {self.task_id} - preprocess",
        )
        # Flag the task so the launcher runs the preprocess step.
        task["payload"]["env"]["TASKCLUSTER_FUZZING_PREPROCESS"] = "1"
        finalize(task, preprocess)
        preprocess_task_id = slugId()
        yield preprocess_task_id, task

    for index in range(1, self.tasks + 1):
        task = render(
            self.max_run_time,
            f"Fuzzing task {self.task_id} - {index}/{self.tasks}",
        )
        if preprocess_task_id is not None:
            # Fuzzing tasks wait for the preprocess task to finish first.
            task["dependencies"].append(preprocess_task_id)
        finalize(task, self)
        yield slugId(), task