def test_cleanup_error(*args, **kwargs):
    mock_get_access_token = args[0]
    mock_create_files = args[1]
    mock_driveapi = args[2]
    mock_walk_files = args[3]
    mock_retirement_report = kwargs['retirement_partner_report']
    mock_retirement_cleanup = kwargs['retirement_partner_cleanup']

    mock_get_access_token.return_value = ('THIS_IS_A_JWT', None)
    mock_create_files.return_value = True
    mock_driveapi.return_value = None
    mock_walk_files.return_value = [
        {'name': partner, 'id': 'folder' + partner}
        for partner in FAKE_ORGS.values()
    ]
    mock_retirement_report.return_value = _fake_retirement_report(user_orgs=list(FAKE_ORGS.keys()))
    mock_retirement_cleanup.side_effect = Exception('Mock cleanup exception')

    result = _call_script(expect_success=False)

    # Make sure we tried to remove the users from the queue; this is the call
    # that raises the mocked cleanup exception.
    mock_retirement_cleanup.assert_called_with(
        [{'original_username': user[LEARNER_ORIGINAL_USERNAME_KEY]} for user in mock_retirement_report.return_value]
    )

    assert result.exit_code == ERR_CLEANUP
    assert 'Users may be stuck in the processing state!' in result.output

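# The positional mocks unpacked above come from a stack of mock.patch decorators
# that this excerpt omits. Decorators apply bottom-up, so args[0] is the mock from
# the lowest @patch, and patch.multiple(..., name=DEFAULT) injects its mocks as
# keyword arguments. Below is a minimal sketch of what that stack could look like
# for test_cleanup_error; every patch target here is an assumption, not taken from
# this listing.
from mock import DEFAULT, patch  # unittest.mock in Python 3-only code


@patch.multiple(
    'tubular.edx_api.LmsApi',                       # assumed home of the LMS API client
    retirement_partner_report=DEFAULT,              # -> kwargs['retirement_partner_report']
    retirement_partner_cleanup=DEFAULT,             # -> kwargs['retirement_partner_cleanup']
)
@patch('tubular.google_api.DriveApi.walk_files')            # -> args[3]
@patch('tubular.google_api.DriveApi.__init__')              # -> args[2]
@patch('tubular.google_api.DriveApi.create_files')          # -> args[1]
@patch('tubular.edx_api.BaseApiClient.get_access_token')    # -> args[0]
def _decorated_test_cleanup_error(*args, **kwargs):         # illustrative only
    ...
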
def test_reporting_error(*args):
    mock_retirement_report = args[0]
    mock_dictwriter = args[1]
    mock_get_access_token = args[2]
    # args[3] is another patched mock that this test does not need to configure.
    mock_drive_init = args[4]

    error_msg = 'Fake unable to write csv'

    mock_get_access_token.return_value = ('THIS_IS_A_JWT', None)
    mock_dictwriter.side_effect = Exception(error_msg)
    mock_drive_init.return_value = None
    mock_retirement_report.return_value = _fake_retirement_report(user_orgs=list(FAKE_ORGS.keys()))

    result = _call_script(expect_success=False)

    assert result.exit_code == ERR_REPORTING
    assert error_msg in result.output

def _fake_retirement_report_user(seed_val, user_orgs=None):
    """
    Creates a unique user to populate a fake report with.

    - seed_val is a number or other unique value for this user; it is formatted
      into the user's values to make sure they're distinct.
    - user_orgs, if given, should be a list of orgs that will be associated with
      the user.
    """
    if user_orgs is None:
        user_orgs = list(FAKE_ORGS.keys())

    return {
        'user_id': USER_ID,
        # _call_script checks that 'username' appears in this column.
        'original_username': 'username_{}'.format(seed_val),
        'original_email': 'user_{}@foo.invalid'.format(seed_val),
        'original_name': '{} {}'.format(UNICODE_NAME_CONSTANT, seed_val),
        'orgs': user_orgs,
        'created': DELETION_TIME,
    }

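# _fake_retirement_report(), used throughout the tests, is not part of this
# excerpt. Given that _call_script() defaults to expecting 10 rows per report,
# it presumably just builds a list of these fake users. A minimal sketch under
# that assumption (the num_users parameter name and its default are guesses):
def _fake_retirement_report(num_users=10, user_orgs=None):
    """Hypothetical reconstruction: a fake LMS retirement report of distinct users."""
    return [_fake_retirement_report_user(seed_val, user_orgs) for seed_val in range(num_users)]
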
def test_google_unicode_folder_names(*args, **kwargs):
    mock_get_access_token = args[0]
    mock_create_comments = args[1]
    mock_list_permissions = args[2]
    mock_walk_files = args[3]
    mock_create_files = args[4]
    mock_driveapi = args[5]
    mock_retirement_report = kwargs['retirement_partner_report']
    mock_retirement_cleanup = kwargs['retirement_partner_cleanup']

    mock_get_access_token.return_value = ('THIS_IS_A_JWT', None)

    # The Drive folder names, all in NFKC form.
    nfkc_partners = [
        unicodedata.normalize('NFKC', u'TéstX'),
        unicodedata.normalize('NFKC', u'TéstX2'),
        unicodedata.normalize('NFKC', u'TéstX3'),
    ]
    mock_list_permissions.return_value = {
        'folder' + partner: [
            {'emailAddress': '*****@*****.**'},
            {'emailAddress': '*****@*****.**'},
        ]
        for partner in nfkc_partners
    }
    mock_walk_files.return_value = [
        {'name': partner, 'id': 'folder' + partner}
        for partner in nfkc_partners
    ]
    mock_create_files.side_effect = ['foo', 'bar', 'baz']
    mock_driveapi.return_value = None
    mock_retirement_report.return_value = _fake_retirement_report(user_orgs=list(FAKE_ORGS.keys()))

    # Configure the same org names in different unicode normalization forms; the
    # script should normalize both sides and still match them to the Drive folders.
    config_orgs = {
        'org1': unicodedata.normalize('NFKC', u'TéstX'),
        'org2': unicodedata.normalize('NFD', u'TéstX2'),
        'org3': unicodedata.normalize('NFKD', u'TéstX3'),
    }

    result = _call_script(config_orgs=config_orgs)

    # Make sure we're getting the LMS token
    mock_get_access_token.assert_called_once()

    # Make sure that we get the report
    mock_retirement_report.assert_called_once()

    # Make sure we tried to upload the files
    assert mock_create_files.call_count == 3

    # Make sure we tried to add comments to the files
    assert mock_create_comments.call_count == 1

    # First [0] returns all positional args, second [0] gets the first positional arg.
    create_comments_file_ids, create_comments_messages = zip(*mock_create_comments.call_args[0][0])
    assert set(create_comments_file_ids) == set(['foo', 'bar', 'baz'])
    assert all('*****@*****.**' in msg for msg in create_comments_messages)
    assert all('*****@*****.**' not in msg for msg in create_comments_messages)

    # Make sure we tried to remove the users from the queue
    mock_retirement_cleanup.assert_called_with(
        [{'original_username': user[LEARNER_ORIGINAL_USERNAME_KEY]} for user in mock_retirement_report.return_value]
    )

    assert 'All reports completed and uploaded to Google.' in result.output

def _call_script(expect_success=True, expected_num_rows=10, config_orgs=None, expected_fields=None):
    """
    Call the retirement partner report script with generic, temporary config and
    Google secrets files. Returns the CliRunner.invoke result.
    """
    if expected_fields is None:
        expected_fields = DEFAULT_FIELD_VALUES

    if config_orgs is None:
        config_orgs = FAKE_ORGS

    runner = CliRunner()
    with runner.isolated_filesystem():
        with open(TEST_CONFIG_YML_NAME, 'w') as config_f:
            fake_config_file(config_f, config_orgs)

        with open(TEST_GOOGLE_SECRETS_FILENAME, 'w') as secrets_f:
            fake_google_secrets_file(secrets_f)

        tmp_output_dir = 'test_output_dir'
        os.mkdir(tmp_output_dir)

        result = runner.invoke(
            generate_report,
            args=[
                '--config_file', TEST_CONFIG_YML_NAME,
                '--google_secrets_file', TEST_GOOGLE_SECRETS_FILENAME,
                '--output_dir', tmp_output_dir,
            ]
        )

        print(result)
        print(result.output)

        if expect_success:
            assert result.exit_code == 0

            # config_orgs was defaulted to FAKE_ORGS above, so these are the orgs
            # the script was configured with.
            config_org_vals = config_orgs.values()

            # Normalize the unicode as the script does
            if PY2:
                config_org_vals = [org.decode('utf-8') for org in config_org_vals]
            config_org_vals = [unicodedata.normalize('NFKC', org) for org in config_org_vals]

            for org in config_org_vals:
                outfile = os.path.join(
                    tmp_output_dir,
                    '{}_{}_{}_{}.csv'.format(
                        REPORTING_FILENAME_PREFIX, TEST_PLATFORM_NAME, org, date.today().isoformat()
                    )
                )

                with open(outfile, 'r') as csvfile:
                    reader = csv.DictReader(csvfile)
                    rows = []
                    for row in reader:
                        # Each expected field's value should appear in its column.
                        for field_key in expected_fields:
                            assert expected_fields[field_key] in row[field_key]
                        rows.append(row)

                # Confirm the number of rows
                assert len(rows) == expected_num_rows

    return result

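# Several module-level constants (FAKE_ORGS, DEFAULT_FIELD_VALUES, USER_ID,
# UNICODE_NAME_CONSTANT, DELETION_TIME) are referenced but never defined in this
# excerpt. The sketch below shows plausible definitions inferred from how the
# tests use them; the concrete values are invented, and only the shapes (three
# partner orgs, a column-name-to-expected-substring mapping) are implied.
USER_ID = '12345'
UNICODE_NAME_CONSTANT = u'Tèst Üsér'
DELETION_TIME = '2018-01-01T00:00:00Z'
FAKE_ORGS = {'org1': u'Org1X', 'org2': u'Org2X', 'org3': u'Org3X'}

# Maps each CSV column to a substring that every row in that column must contain.
DEFAULT_FIELD_VALUES = {
    'user_id': USER_ID,
    'original_username': 'username',
    'original_email': 'invalid',
    'original_name': UNICODE_NAME_CONSTANT,
    'deletion_completed': DELETION_TIME,
}
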
def test_successful_report(*args, **kwargs):
    mock_get_access_token = args[0]
    mock_create_comments = args[1]
    mock_list_permissions = args[2]
    mock_walk_files = args[3]
    mock_create_files = args[4]
    mock_driveapi = args[5]
    mock_retirement_report = kwargs['retirement_partner_report']
    mock_retirement_cleanup = kwargs['retirement_partner_cleanup']

    mock_get_access_token.return_value = ('THIS_IS_A_JWT', None)
    mock_create_comments.return_value = None

    fake_partners = list(itervalues(FAKE_ORGS))
    # Generate the list_permissions return value. The first two partners have POCs.
    mock_list_permissions.return_value = {
        'folder' + partner: [
            {'emailAddress': '*****@*****.**'},  # The POC.
            {'emailAddress': '*****@*****.**'},
        ]
        for partner in fake_partners[:2]
    }
    # The last one does not have any POCs.
    mock_list_permissions.return_value.update({
        'folder' + partner: [
            {'emailAddress': '*****@*****.**'},
        ]
        for partner in [fake_partners[2]]
    })

    mock_walk_files.return_value = [
        {'name': partner, 'id': 'folder' + partner}
        for partner in FAKE_ORGS.values()
    ]
    mock_create_files.side_effect = ['foo', 'bar', 'baz']
    mock_driveapi.return_value = None
    mock_retirement_report.return_value = _fake_retirement_report(user_orgs=list(FAKE_ORGS.keys()))

    result = _call_script()

    # Make sure we're getting the LMS token
    mock_get_access_token.assert_called_once()

    # Make sure that we get the report
    mock_retirement_report.assert_called_once()

    # Make sure we tried to upload the files
    assert mock_create_files.call_count == 3

    # Make sure we tried to add comments to the files
    assert mock_create_comments.call_count == 1

    # First [0] returns all positional args, second [0] gets the first positional arg.
    create_comments_file_ids, create_comments_messages = zip(*mock_create_comments.call_args[0][0])
    assert set(create_comments_file_ids).issubset(set(['foo', 'bar', 'baz']))
    # Only two comments were created; the third folder didn't have a POC.
    assert len(create_comments_file_ids) == 2
    assert all('*****@*****.**' in msg for msg in create_comments_messages)
    assert all('*****@*****.**' not in msg for msg in create_comments_messages)
    assert 'WARNING: could not find a POC' in result.output

    # Make sure we tried to remove the users from the queue
    mock_retirement_cleanup.assert_called_with(
        [{'original_username': user[LEARNER_ORIGINAL_USERNAME_KEY]} for user in mock_retirement_report.return_value]
    )

    assert 'All reports completed and uploaded to Google.' in result.output

def _call_script(expect_success=True, config_orgs=None):
    """
    Call the retirement partner report script with generic, temporary config and
    Google secrets files. Returns the CliRunner.invoke result.
    """
    if config_orgs is None:
        config_orgs = FAKE_ORGS

    runner = CliRunner()
    with runner.isolated_filesystem():
        with open(TEST_CONFIG_YML_NAME, 'w') as config_f:
            fake_config_file(config_f, config_orgs)

        with open(TEST_GOOGLE_SECRETS_FILENAME, 'w') as secrets_f:
            fake_google_secrets_file(secrets_f)

        tmp_output_dir = 'test_output_dir'
        os.mkdir(tmp_output_dir)

        result = runner.invoke(
            generate_report,
            args=[
                '--config_file', TEST_CONFIG_YML_NAME,
                '--google_secrets_file', TEST_GOOGLE_SECRETS_FILENAME,
                '--output_dir', tmp_output_dir,
            ]
        )

        print(result)
        print(result.output)

        if expect_success:
            assert result.exit_code == 0

            # config_orgs was defaulted to FAKE_ORGS above, so these are the orgs
            # the script was configured with.
            config_org_vals = config_orgs.values()

            # Normalize the unicode as the script does
            if PY2:
                config_org_vals = [org.decode('utf-8') for org in config_org_vals]
            config_org_vals = [unicodedata.normalize('NFKC', org) for org in config_org_vals]

            for org in config_org_vals:
                outfile = os.path.join(
                    tmp_output_dir,
                    '{}_{}_{}_{}.csv'.format(
                        REPORTING_FILENAME_PREFIX, TEST_PLATFORM_NAME, org, date.today().isoformat()
                    )
                )

                with open(outfile, 'r') as csvfile:
                    reader = csv.DictReader(csvfile)
                    rows = []
                    for row in reader:
                        # Check that each value landed in the correct column.
                        assert USER_ID in row['user_id']
                        assert 'username' in row['original_username']
                        assert 'invalid' in row['original_email']
                        assert UNICODE_NAME_CONSTANT in row['original_name']
                        assert DELETION_TIME in row['deletion_completed']
                        rows.append(row)

                # Confirm that there are rows at all
                assert rows

    return result

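# fake_google_secrets_file(), used by both _call_script() variants above, is also
# not part of this excerpt. Judging by the --google_secrets_file option it feeds,
# it most likely writes a dummy Google service-account JSON blob; a minimal sketch
# under that assumption (every value below is fake and invented for illustration):
import json


def fake_google_secrets_file(f):
    """Hypothetical reconstruction: write a fake service-account secrets file."""
    fake_private_key = '-----BEGIN PRIVATE KEY-----\nFAKE\n-----END PRIVATE KEY-----\n'
    secrets = {
        'type': 'service_account',
        'project_id': 'fake-project',
        'private_key_id': 'fake_key_id',
        'private_key': fake_private_key,
        'client_email': 'fake-sa@fake-project.iam.gserviceaccount.com',
        'client_id': '1234567890',
        'auth_uri': 'https://accounts.google.com/o/oauth2/auth',
        'token_uri': 'https://oauth2.googleapis.com/token',
    }
    json.dump(secrets, f)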