def test_task_query_by_task_id(): task_id = "A-8AqzvvRsqH9b0VHBXYjA" with patch.object(taskcluster.Queue, "task", new=mocked_definition), patch.object(taskcluster.Queue, "status", new=mocked_status): task = Task(task_id=task_id) assert task.kind == "test" assert task.state == "completed"
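# The mocked_definition and mocked_status fixtures used above are not part of
# this excerpt. A minimal sketch of what they could look like, assuming the
# dummy JSON files carry the "task" and "status" payloads the Taskcluster
# queue would return (the payload shape here is an assumption, not confirmed
# by the source):
def mocked_definition(self, task_id):
    # Hypothetical stand-in for taskcluster.Queue.task(taskId):
    # return a canned task definition.
    return get_dummy_task_json("completed.json")["task"]


def mocked_status(self, task_id):
    # Hypothetical stand-in for taskcluster.Queue.status(taskId):
    # return a canned status payload, wrapped the way the API wraps it.
    return {"status": get_dummy_task_json("completed.json")["status"]}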
async def _read_file_cache(self):
    """Populate self.tasklist from the on-disk cache file, if readable."""
    try:
        jsondata = json.loads(await fetch_file(self.cache_file))
        self.tasklist = [Task(json=data) for data in jsondata]
    except Exception as e:
        log.warning(e)
        return False
    return True
async def fetch_tasks(self, limit=None):
    """Return tasks with the associated group ID.

    Handles continuationToken without the user being aware of it.
    Enforces the limit parameter as a limit of the total number
    of tasks to be returned.
    """
    if self.cache_file:
        # _read_file_cache is a coroutine, so it must be awaited; calling it
        # bare would return a coroutine object, which is always truthy.
        if await self._read_file_cache():
            return

    query = {}
    if limit:
        # The default taskcluster-client API page size is 1000 tasks.
        query['limit'] = min(limit, 1000)

    def under_limit(length):
        """Return True while we still need more tasks."""
        return not limit or length < limit

    async with aiohttp.ClientSession() as session:
        queue = Queue(session=session)
        outcome = await queue.listTaskGroup(self.groupid, query=query)
        tasks = outcome.get('tasks', [])
        # Keep following continuation tokens until we have enough tasks
        # or the queue has no more pages to give us.
        while under_limit(len(tasks)) and outcome.get('continuationToken'):
            query.update({'continuationToken': outcome.get('continuationToken')})
            outcome = await queue.listTaskGroup(self.groupid, query=query)
            tasks.extend(outcome.get('tasks', []))

    if limit:
        tasks = tasks[:limit]

    self.tasklist = [Task(json=data) for data in tasks]

    if self.cache_file:
        self._write_file_cache()
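# A minimal usage sketch for fetch_tasks, kept as a comment because these are
# instance methods. The TaskGraph name and construction below are assumptions
# based on the `self.groupid` / `self.cache_file` attributes the methods rely
# on; they are not confirmed by this excerpt.
#
#   import asyncio
#
#   async def main():
#       graph = TaskGraph("fN1SbArXTPSVFZUvvRqQXw")  # hypothetical group ID
#       await graph.fetch_tasks(limit=50)
#       for task in graph.tasklist:
#           print(task.taskid, task.state)
#
#   asyncio.run(main())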
def test_task_kind(filename, expected):
    task = Task(json=get_dummy_task_json(filename))
    assert task.kind == expected


def test_run_durations(filename, expected):
    task = Task(json=get_dummy_task_json(filename))
    assert task.run_durations() == expected


def test_task_resolved(filename, expected):
    task = Task(json=get_dummy_task_json(filename))
    assert task.resolved == expected


def test_task_scheduled(filename, expected):
    task = Task(json=get_dummy_task_json(filename))
    assert task.scheduled == expected


def test_task_state_completed(filename, expected):
    task = Task(json=get_dummy_task_json(filename))
    assert task.state == expected


def test_task_completed_bool(filename, state):
    task = Task(json=get_dummy_task_json(filename))
    assert task.completed is state
def test_task_taskid():
    task = Task(json=get_dummy_task_json("completed.json"))
    assert task.taskid == "A-8AqzvvRsqH9b0VHBXYjA"


def test_run_durations2():
    task = Task(json=get_dummy_task_json("completed.json"))
    assert task.run_durations() == [
        datetime.timedelta(seconds=852, microseconds=561000)
    ]
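# A small illustrative addition, not part of the original suite: since
# run_durations() returns datetime.timedelta objects, per-run durations can
# be summed directly with a timedelta start value.
def test_run_durations_total():
    task = Task(json=get_dummy_task_json("completed.json"))
    total = sum(task.run_durations(), datetime.timedelta())
    assert total == datetime.timedelta(seconds=852, microseconds=561000)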
def test_task_no_input():
    # Task requires either a task_id or a json payload.
    with pytest.raises(ValueError):
        Task()
def test_scopes(filename, expected):
    task = Task(json=get_dummy_task_json(filename))
    assert task.scopes == expected


def test_task_names(filename, expected):
    task = Task(json=get_dummy_task_json(filename))
    assert task.label == expected
    assert task.name == expected


def test_task_has_failures(filename, expected):
    task = Task(json=get_dummy_task_json(filename))
    assert task.has_failures == expected