def test_verify_data(self):
    """
    Run the data verifier on every teacher of every ParlAI task touched by
    the current git diff, accumulating errors and failing once at the end.

    Skips entirely when no task files changed. On CircleCI the test fails
    fast with instructions to run locally, since it is too slow/flaky there.
    """
    parser = setup_args()
    opt = parser.parse_args(print_args=False)
    changed_files = testing_utils.git_changed_files()
    # Only task implementation files matter; docs and the task registry
    # cannot affect data verification.
    changed_task_files = []
    for file in changed_files:
        if (
            'parlai/tasks' in file
            and 'README' not in file
            and 'task_list.py' not in file
        ):
            changed_task_files.append(file)

    if not changed_task_files:
        return

    found_errors = False
    for file in changed_task_files:
        # Path layout is parlai/tasks/<task>/<file>, so the task name is
        # the second-to-last path component.
        task = file.split('/')[-2]
        module_name = "%s.tasks.%s.agents" % ('parlai', task)
        task_module = importlib.import_module(module_name)
        subtasks = [
            ':'.join([task, x])
            for x in dir(task_module)
            if ('teacher' in x.lower() and x not in BASE_TEACHERS)
        ]

        if testing_utils.is_this_circleci():
            if len(subtasks) == 0:
                continue
            self.fail(
                'test_verify_data plays poorly with CircleCI. Please run '
                '`python tests/datatests/test_new_tasks.py` locally and '
                'paste the output in your pull request.'
            )

        for subt in subtasks:
            parser = setup_args()
            opt = parser.parse_args(args=['--task', subt], print_args=False)
            opt['task'] = subt
            try:
                with testing_utils.capture_output():
                    text, log = verify(opt, print_parser=False)
            except Exception:
                found_errors = True
                traceback.print_exc()
                print("Got above exception in {}".format(subt))
                # BUGFIX: `log` is undefined (or stale from a previous
                # subtask) when verify() raises — skip the key checks.
                continue
            for key in KEYS:
                if log[key] != 0:
                    print('There are {} {} in {}.'.format(log[key], key, subt))
                    found_errors = True

    self.assertFalse(found_errors, "Errors were found.")
def test_verify_data(self):
    """
    Run the data verifier on every teacher of every ParlAI task touched by
    the current git diff, asserting each error counter is zero.

    Skips entirely when no task files changed. On CircleCI the test fails
    fast with instructions to run locally, since it is too slow/flaky there.
    """
    parser = setup_args()
    opt = parser.parse_args(print_args=False)
    changed_files = testing_utils.git_changed_files()
    # Only task implementation files matter; docs and the task registry
    # cannot affect data verification.
    changed_task_files = []
    for file in changed_files:
        if (
            'parlai/tasks' in file
            and 'README' not in file
            and 'task_list.py' not in file
        ):
            changed_task_files.append(file)

    if not changed_task_files:
        return

    for file in changed_task_files:
        # Path layout is parlai/tasks/<task>/<file>, so the task name is
        # the second-to-last path component.
        task = file.split('/')[-2]
        module_name = "%s.tasks.%s.agents" % ('parlai', task)
        task_module = importlib.import_module(module_name)
        subtasks = [
            ':'.join([task, x])
            for x in dir(task_module)
            if ('teacher' in x.lower() and x not in BASE_TEACHERS)
        ]

        if testing_utils.is_this_circleci():
            if len(subtasks) == 0:
                continue
            self.fail(
                'test_verify_data plays poorly with CircleCI. Please run '
                '`python tests/data/test_new_tasks.py` locally and paste the '
                'output in your pull request.'
            )

        for subt in subtasks:
            parser = setup_args()
            opt = parser.parse_args(args=['--task', subt], print_args=False)
            opt['task'] = subt
            with testing_utils.capture_output():
                text, log = verify(opt, print_parser=False)
            for key in KEYS:
                # BUGFIX: second format argument was `log` (the whole dict)
                # instead of `key`, garbling the failure message.
                self.assertEqual(
                    log[key],
                    0,
                    'There are {} {} in this task.'.format(log[key], key),
                )