def test_update_version_in_config_files_updates_version(self):
    """Checks that update_version_in_config_files() stamps version
    1.2.3 into both the mock package.json and the mock feconf file.
    """
    pkg_json_swap = self.swap(
        update_changelog_and_credits, 'PACKAGE_JSON_FILEPATH',
        MOCK_PACKAGE_JSON_PATH)
    original_pkg_json = python_utils.open_file(
        MOCK_PACKAGE_JSON_PATH, 'r').read()
    expected_pkg_json = re.compile('"version": ".*"').sub(
        '"version": "1.2.3"', original_pkg_json)

    feconf_path_swap = self.swap(common, 'FECONF_PATH', MOCK_FECONF_PATH)
    original_feconf = python_utils.open_file(MOCK_FECONF_PATH, 'r').read()
    expected_feconf = re.compile('OPPIA_VERSION = \'.*\'').sub(
        'OPPIA_VERSION = \'1.2.3\'', original_feconf)

    try:
        with contextlib.ExitStack() as stack:
            stack.enter_context(self.branch_name_swap)
            stack.enter_context(feconf_path_swap)
            stack.enter_context(pkg_json_swap)
            update_changelog_and_credits.update_version_in_config_files()
            self.assertEqual(
                python_utils.open_file(MOCK_PACKAGE_JSON_PATH, 'r').read(),
                expected_pkg_json)
            self.assertEqual(
                python_utils.open_file(MOCK_FECONF_PATH, 'r').read(),
                expected_feconf)
    finally:
        # Restore the mock files so later tests see pristine content.
        write_to_file(MOCK_PACKAGE_JSON_PATH, original_pkg_json)
        write_to_file(MOCK_FECONF_PATH, original_feconf)
def update_sorted_file(filepath, new_list):
    """Updates the files AUTHORS and CONTRIBUTORS with a sorted list of
    new authors or contributors.

    Args:
        filepath: str. The path of the file to update.
        new_list: list(str). The list of new authors or contributors to
            add to the file.
    """
    with python_utils.open_file(filepath, 'r') as f:
        file_lines = f.readlines()

    # Track the final '#'-prefixed line; the sorted name list starts two
    # lines after it (the comment is followed by one blank line).
    for line in file_lines:
        if line.startswith('#'):
            last_comment_line = line
    start_index = file_lines.index(last_comment_line) + 2

    merged_names = set(new_list) | set(file_lines[start_index:])
    sorted_names = sorted(merged_names, key=lambda s: s.lower())

    with python_utils.open_file(filepath, 'w') as f:
        f.writelines(file_lines[:start_index] + sorted_names)
def update_developer_names(release_summary_lines):
    """Updates about-page.constants.ts file.

    Args:
        release_summary_lines: list(str). List of lines in
            ../release_summary.md.
    """
    python_utils.PRINT('Updating about-page file...')
    new_names = get_new_contributors(
        release_summary_lines, return_only_names=True)

    with python_utils.open_file(
        ABOUT_PAGE_CONSTANTS_FILEPATH, 'r') as about_page_file:
        about_page_lines = about_page_file.readlines()

    start_index = about_page_lines.index(CREDITS_START_LINE) + 1
    # NOTE(review): end_index is computed relative to the slice starting
    # at start_index, then used as an absolute slice bound below —
    # confirm this matches the intended credits-section extent.
    end_index = about_page_lines[start_index:].index(CREDITS_END_LINE) + 1

    credit_names = about_page_lines[start_index:end_index]
    credit_names.extend(
        '%s\'%s\',\n' % (CREDITS_INDENT, name) for name in new_names)
    credit_names = sorted(set(credit_names), key=lambda s: s.lower())
    about_page_lines[start_index:end_index] = credit_names

    with python_utils.open_file(
        ABOUT_PAGE_CONSTANTS_FILEPATH, 'w') as about_page_file:
        for line in about_page_lines:
            about_page_file.write(python_utils.UNICODE(line))
    python_utils.PRINT('Updated about-page file!')
def inplace_replace_file(filename, regex_pattern, replacement_string):
    """Replace the file content in-place with regex pattern. The pattern
    is used to replace the file's content line by line.

    Note: This function should only be used with files that are
    processed line by line.

    Args:
        filename: str. The name of the file to be changed.
        regex_pattern: str. The pattern to check.
        replacement_string: str. The content to be replaced.
    """
    backup_filename = '%s.bak' % filename
    shutil.copyfile(filename, backup_filename)
    try:
        pattern = re.compile(regex_pattern)
        with python_utils.open_file(backup_filename, 'r') as f:
            replaced_lines = [
                pattern.sub(replacement_string, line) for line in f]
        with python_utils.open_file(filename, 'w') as f:
            f.writelines(replaced_lines)
        # Success: the backup is no longer needed.
        os.remove(backup_filename)
    except Exception:
        # Restore the original content if anything went wrong.
        os.remove(filename)
        shutil.move(backup_filename, filename)
        raise
def test_update_to_registration_updated_date(self):
    """Verifies that the REGISTRATION_PAGE_LAST_UPDATED_UTC line is
    rewritten to the committer date of the terms-of-service commit.
    """
    def mock_input():
        return 'y'

    def mock_get_commit(unused_self, unused_sha):
        return github.Commit.Commit(
            requester='', headers='',
            attributes={
                'commit': {'committer': {'date': '2016-11-15T3:41:01Z'}}},
            completed='')

    input_swap = self.swap(builtins, 'input', mock_input)
    get_commit_swap = self.swap(
        github.Repository.Repository, 'get_commit', mock_get_commit)
    feconf_path = tempfile.NamedTemporaryFile().name
    feconf_text = (
        '# When the site terms were last updated, in UTC.\n'
        'REGISTRATION_PAGE_LAST_UPDATED_UTC = '
        'datetime.datetime(2015, 10, 14, 2, 40, 0)\n'
        '# Format of string for dashboard statistics logs.\n'
        '# NOTE TO DEVELOPERS: This format should not be changed, '
        'since it is used in\n'
        '# the existing storage models for UserStatsModel.\n'
        'DASHBOARD_STATS_DATETIME_STRING_FORMAT = \'%Y-%m-%d\'\n')
    expected_feconf_text = feconf_text.replace(
        'datetime.datetime(2015, 10, 14, 2, 40, 0)',
        'datetime.datetime(2016, 11, 15, 3, 41, 1)')
    with python_utils.open_file(feconf_path, 'w') as f:
        f.write(feconf_text)

    with self.getpass_swap, self.get_org_swap, self.get_repo_swap:
        with self.open_tab_swap, input_swap, get_commit_swap:
            update_configs.check_updates_to_terms_of_service(
                feconf_path, 'test-token')

    with python_utils.open_file(feconf_path, 'r') as f:
        self.assertEqual(f.read(), expected_feconf_text)
def test_set_constants_to_default(self):
    """Checks that set_constants_to_default() flips DEV_MODE and
    EMULATOR_MODE back to true and disables maintenance mode.
    """
    mock_constants_path = 'mock_app_dev.yaml'
    mock_feconf_path = 'mock_app.yaml'
    constants_path_swap = self.swap(
        common, 'CONSTANTS_FILE_PATH', mock_constants_path)
    feconf_path_swap = self.swap(common, 'FECONF_PATH', mock_feconf_path)

    constants_temp_file = tempfile.NamedTemporaryFile()
    constants_temp_file.name = mock_constants_path
    with python_utils.open_file(mock_constants_path, 'w') as tmp:
        tmp.write('export = {\n')
        tmp.write(' "DEV_MODE": false,\n')
        tmp.write(' "EMULATOR_MODE": false,\n')
        tmp.write('};')

    feconf_temp_file = tempfile.NamedTemporaryFile()
    feconf_temp_file.name = mock_feconf_path
    with python_utils.open_file(mock_feconf_path, 'w') as tmp:
        tmp.write(u'ENABLE_MAINTENANCE_MODE = True')

    with constants_path_swap, feconf_path_swap:
        build.set_constants_to_default()

    with python_utils.open_file(mock_constants_path, 'r') as constants_file:
        self.assertEqual(
            constants_file.read(),
            'export = {\n'
            ' "DEV_MODE": true,\n'
            ' "EMULATOR_MODE": true,\n'
            '};')
    with python_utils.open_file(mock_feconf_path, 'r') as feconf_file:
        self.assertEqual(
            feconf_file.read(), 'ENABLE_MAINTENANCE_MODE = False')
    constants_temp_file.close()
    feconf_temp_file.close()
def test_process_html(self):
    """Test process_html removes whitespaces."""
    base_html_source_path = os.path.join(
        MOCK_TEMPLATES_DEV_DIR, 'base.html')
    build._ensure_files_exist([base_html_source_path])  # pylint: disable=protected-access

    # Prepare a file_stream object from python_utils.string_io().
    minified_stream = python_utils.string_io()

    # The source file must still contain runs of whitespace; otherwise
    # the minification assertion below would be vacuous.
    with python_utils.open_file(base_html_source_path, 'r') as src:
        self.assertRegexpMatches(
            src.read(), r'\s{2,}',
            msg='No white spaces detected in %s unexpectedly'
            % base_html_source_path)

    # Build base.html file.
    with python_utils.open_file(base_html_source_path, 'r') as src:
        build.process_html(src, minified_stream)

    self.assertNotRegexpMatches(
        minified_stream.getvalue(), r'\s{2,}',
        msg='All white spaces must be removed from %s'
        % base_html_source_path)
def add_mailchimp_api_key(release_feconf_path):
    """Adds mailchimp api key to feconf config file.

    Args:
        release_feconf_path: str. The path to feconf file in release
            directory.
    """
    key_pattern = re.compile('^[a-z0-9]{32}-us18$')
    mailchimp_api_key = getpass.getpass(
        prompt=('Enter mailchimp api key from the release process doc.'))
    mailchimp_api_key = mailchimp_api_key.strip()
    # Re-prompt until the key matches the expected mailchimp format.
    while key_pattern.match(mailchimp_api_key) is None:
        mailchimp_api_key = getpass.getpass(
            prompt=(
                'You have entered an invalid mailchimp api '
                'key: %s, please retry.' % mailchimp_api_key))
        mailchimp_api_key = mailchimp_api_key.strip()

    with python_utils.open_file(release_feconf_path, 'r') as f:
        feconf_lines = f.readlines()

    assert 'MAILCHIMP_API_KEY = None\n' in feconf_lines, (
        'Missing mailchimp API key')

    with python_utils.open_file(release_feconf_path, 'w') as f:
        for line in feconf_lines:
            if line == 'MAILCHIMP_API_KEY = None\n':
                line = line.replace('None', '\'%s\'' % mailchimp_api_key)
            f.write(line)
def inplace_replace_file_context(filename, regex_pattern, replacement_string):
    """Context manager in which the file's content is replaced according
    to the given regex pattern. This function should only be used with
    files that are processed line by line.

    Args:
        filename: str. The name of the file to be changed.
        regex_pattern: str. The pattern to check.
        replacement_string: str. The content to be replaced.

    Yields:
        None. Nothing.
    """
    # NOTE(review): documented as a context manager, but no
    # @contextlib.contextmanager decorator is visible in this view of
    # the file — confirm it is applied at the definition site.
    backup_filename = '%s.bak' % filename
    pattern = re.compile(regex_pattern)
    shutil.copyfile(filename, backup_filename)
    try:
        with python_utils.open_file(backup_filename, 'r') as f:
            replaced_lines = [
                pattern.sub(replacement_string, line) for line in f]
        with python_utils.open_file(filename, 'w') as f:
            f.write(''.join(replaced_lines))
        yield
    finally:
        # Always restore the original content once the context exits.
        if os.path.isfile(filename) and os.path.isfile(backup_filename):
            os.remove(filename)
        if os.path.isfile(backup_filename):
            shutil.move(backup_filename, filename)
def test_invalid_mailchimp_api_key(self):
    """An invalid key must trigger the retry prompt before the valid key
    is written into feconf.
    """
    check_prompts = {
        'Enter mailchimp api key from the release process doc.': False,
        'You have entered an invalid mailchimp api key: invalid, '
        'please retry.': False
    }
    expected_check_prompts = {
        'Enter mailchimp api key from the release process doc.': True,
        'You have entered an invalid mailchimp api key: invalid, '
        'please retry.': True
    }
    mailchimp_api_key = ('%s-us18' % ('').join(['1'] * 32))

    def mock_getpass(prompt):
        check_prompts[prompt] = True
        # First call returns a bad key; the retry prompt gets the good one.
        if 'invalid' in prompt:
            return mailchimp_api_key
        return 'invalid'

    getpass_swap = self.swap(getpass, 'getpass', mock_getpass)
    temp_feconf_path = tempfile.NamedTemporaryFile().name
    feconf_text = (
        'REDISHOST = \'192.13.2.1\'\n'
        'MAILGUN_API_KEY = None\n'
        'MAILCHIMP_API_KEY = None\n'
        '# When the site terms were last updated, in UTC.\n'
        'REGISTRATION_PAGE_LAST_UPDATED_UTC = '
        'datetime.datetime(2015, 10, 14, 2, 40, 0)\n'
        '# Format of string for dashboard statistics logs.\n'
        '# NOTE TO DEVELOPERS: This format should not be changed, '
        'since it is used in\n'
        '# the existing storage models for UserStatsModel.\n'
        'DASHBOARD_STATS_DATETIME_STRING_FORMAT = \'YY-mm-dd\'\n')
    expected_feconf_text = (
        'REDISHOST = \'192.13.2.1\'\n'
        'MAILGUN_API_KEY = None\n'
        'MAILCHIMP_API_KEY = \'%s\'\n'
        '# When the site terms were last updated, in UTC.\n'
        'REGISTRATION_PAGE_LAST_UPDATED_UTC = '
        'datetime.datetime(2015, 10, 14, 2, 40, 0)\n'
        '# Format of string for dashboard statistics logs.\n'
        '# NOTE TO DEVELOPERS: This format should not be changed, '
        'since it is used in\n'
        '# the existing storage models for UserStatsModel.\n'
        'DASHBOARD_STATS_DATETIME_STRING_FORMAT = \'YY-mm-dd\'\n'
        % (mailchimp_api_key))
    with python_utils.open_file(temp_feconf_path, 'w') as f:
        f.write(feconf_text)

    with getpass_swap:
        update_configs.add_mailchimp_api_key(temp_feconf_path)

    self.assertEqual(check_prompts, expected_check_prompts)
    with python_utils.open_file(temp_feconf_path, 'r') as f:
        self.assertEqual(f.read(), expected_feconf_text)
def setUp(self):
    """Loads the raw JPEG and PNG fixtures used by the image tests."""
    super(ImageServicesUnitTests, self).setUp()
    jpeg_fixture_path = os.path.join(
        feconf.TESTS_DATA_DIR, 'dummy_large_image.jpg')
    with python_utils.open_file(
        jpeg_fixture_path, 'rb', encoding=None) as f:
        self.jpeg_raw_image = f.read()
    png_fixture_path = os.path.join(feconf.TESTS_DATA_DIR, 'img.png')
    with python_utils.open_file(
        png_fixture_path, 'rb', encoding=None) as f:
        self.png_raw_image = f.read()
def inplace_replace_file(
    filename, regex_pattern, replacement_string,
    expected_number_of_replacements=None
):
    """Replace the file content in-place with regex pattern. The pattern
    is used to replace the file's content line by line.

    Note: This function should only be used with files that are
    processed line by line.

    Args:
        filename: str. The name of the file to be changed.
        regex_pattern: str. The pattern to check.
        replacement_string: str. The content to be replaced.
        expected_number_of_replacements: optional(int). The number of
            replacements that should be made. When None no check is done.
    """
    backup_filename = '%s.bak' % filename
    shutil.copyfile(filename, backup_filename)
    replaced_lines = []
    total_replacements = 0
    try:
        pattern = re.compile(regex_pattern)
        with python_utils.open_file(backup_filename, 'r') as f:
            for line in f:
                new_line, replacements = pattern.subn(
                    replacement_string, line)
                replaced_lines.append(new_line)
                total_replacements += replacements
        with python_utils.open_file(filename, 'w') as f:
            f.writelines(replaced_lines)
        # Fail loudly (and restore via the except branch) when the
        # caller expected a specific replacement count.
        if (
            expected_number_of_replacements is not None and
            total_replacements != expected_number_of_replacements
        ):
            raise ValueError(
                'Wrong number of replacements. Expected %s. Performed %s.'
                % (
                    expected_number_of_replacements,
                    total_replacements
                )
            )
        os.remove(backup_filename)
    except Exception:
        # Restore the original content if anything went wrong.
        os.remove(filename)
        shutil.move(backup_filename, filename)
        raise
def test_get_refs(self):
    """get_refs() should parse a stdin line into a GitRef tuple."""
    stdin_path = tempfile.NamedTemporaryFile().name
    with python_utils.open_file(stdin_path, 'w') as f:
        f.write('local_ref local_sha1 remote_ref remote_sha1')
    with python_utils.open_file(stdin_path, 'r') as f:
        with self.swap(sys, 'stdin', f):
            expected_refs = [
                pre_push_hook.GitRef(
                    local_ref='local_ref', local_sha1='local_sha1',
                    remote_ref='remote_ref', remote_sha1='remote_sha1')
            ]
            self.assertEqual(pre_push_hook.get_refs(), expected_refs)
def test_update_developer_names(self):
    """update_developer_names() should merge the new contributors into
    the credits section, sorted case-insensitively.
    """
    with python_utils.open_file(
        update_changelog_and_credits.ABOUT_PAGE_CONSTANTS_FILEPATH, 'r'
    ) as f:
        about_page_lines = f.readlines()
    start_index = about_page_lines.index(
        update_changelog_and_credits.CREDITS_START_LINE) + 1
    end_index = about_page_lines[start_index:].index(
        update_changelog_and_credits.CREDITS_END_LINE) + 1
    existing_developer_names = about_page_lines[start_index:end_index]

    # Mirror the real about-page into a mock file the script can mutate.
    tmp_file = tempfile.NamedTemporaryFile()
    tmp_file.name = MOCK_ABOUT_PAGE_CONSTANTS_FILEPATH
    with python_utils.open_file(
        MOCK_ABOUT_PAGE_CONSTANTS_FILEPATH, 'w'
    ) as f:
        for line in about_page_lines:
            f.write(str(line))

    release_summary_lines = read_from_file(MOCK_RELEASE_SUMMARY_FILEPATH)
    new_developer_names = update_changelog_and_credits.get_new_contributors(
        release_summary_lines, return_only_names=True)

    expected_developer_names = existing_developer_names
    for name in new_developer_names:
        expected_developer_names.append('%s\'%s\',\n' % (
            update_changelog_and_credits.CREDITS_INDENT, name))
    expected_developer_names = sorted(
        set(expected_developer_names), key=lambda s: s.lower())

    with self.swap(
        update_changelog_and_credits, 'ABOUT_PAGE_CONSTANTS_FILEPATH',
        MOCK_ABOUT_PAGE_CONSTANTS_FILEPATH):
        update_changelog_and_credits.update_developer_names(
            release_summary_lines)

    with python_utils.open_file(tmp_file.name, 'r') as f:
        about_page_lines = f.readlines()
    start_index = about_page_lines.index(
        update_changelog_and_credits.CREDITS_START_LINE) + 1
    end_index = about_page_lines[start_index:].index(
        update_changelog_and_credits.CREDITS_END_LINE) + 1
    actual_developer_names = about_page_lines[start_index:end_index]

    self.assertEqual(actual_developer_names, expected_developer_names)
    tmp_file.close()
    if os.path.isfile(MOCK_ABOUT_PAGE_CONSTANTS_FILEPATH):
        # Occasionally this temp file is not deleted.
        os.remove(MOCK_ABOUT_PAGE_CONSTANTS_FILEPATH)
def update_changelog(
        branch_name, release_summary_lines, current_release_version_number):
    """Updates CHANGELOG file.

    Args:
        branch_name: str. The name of the current branch.
        release_summary_lines: list(str). List of lines in
            ../release_summary.md.
        current_release_version_number: str. The version of current
            release.
    """
    python_utils.PRINT('Updating Changelog...')
    start_index = release_summary_lines.index(
        constants.release_constants.CHANGELOG_HEADER) + 1
    end_index = release_summary_lines.index(
        constants.release_constants.COMMIT_HISTORY_HEADER)
    release_version_changelog = [
        u'v%s (%s)\n' % (current_release_version_number, CURRENT_DATE),
        u'------------------------\n'
    ] + release_summary_lines[start_index:end_index]

    with python_utils.open_file(CHANGELOG_FILEPATH, 'r') as changelog_file:
        changelog_lines = changelog_file.readlines()

    if constants.release_constants.BRANCH_TYPE_HOTFIX in branch_name:
        previous_release_version = get_previous_release_version(
            constants.release_constants.BRANCH_TYPE_HOTFIX,
            current_release_version_number)
        changelog_lines = remove_repetition_from_changelog(
            current_release_version_number, previous_release_version,
            changelog_lines)
    else:
        previous_release_version = get_previous_release_version(
            constants.release_constants.BRANCH_TYPE_RELEASE,
            current_release_version_number)
        # Deduplicate only if a changelog entry for the current version
        # was already generated by an earlier run.
        version_header = 'v%s' % current_release_version_number
        if any(
                line.startswith(version_header)
                for line in changelog_lines):
            changelog_lines = remove_repetition_from_changelog(
                current_release_version_number, previous_release_version,
                changelog_lines)

    # Insert the new section just below the changelog title.
    changelog_lines[2:2] = release_version_changelog
    with python_utils.open_file(CHANGELOG_FILEPATH, 'w') as changelog_file:
        changelog_file.writelines(changelog_lines)
    python_utils.PRINT('Updated Changelog!')
def test_changes_are_applied_to_config(self):
    """apply_changes_based_on_config() should rewrite the matching
    feconf line and leave the rest of the file untouched.
    """
    with python_utils.open_file(MOCK_LOCAL_FECONF_PATH, 'r') as f:
        original_text = f.read()
    expected_text = original_text.replace(
        'INCOMING_EMAILS_DOMAIN_NAME = \'\'',
        'INCOMING_EMAILS_DOMAIN_NAME = \'oppia.org\'')
    try:
        update_configs.apply_changes_based_on_config(
            MOCK_LOCAL_FECONF_PATH, VALID_FECONF_CONFIG_PATH,
            update_configs.FECONF_REGEX)
        with python_utils.open_file(MOCK_LOCAL_FECONF_PATH, 'r') as f:
            self.assertEqual(f.read(), expected_text)
    finally:
        # Restore the mock feconf for the other tests.
        with python_utils.open_file(MOCK_LOCAL_FECONF_PATH, 'w') as f:
            f.write(original_text)
def test_missing_mailchimp_api_key_line(self):
    """An AssertionError is raised when feconf lacks the
    MAILCHIMP_API_KEY line.
    """
    mailchimp_api_key = ('%s-us18' % ('').join(['1'] * 32))

    def mock_getpass(prompt):  # pylint: disable=unused-argument
        return mailchimp_api_key

    getpass_swap = self.swap(getpass, 'getpass', mock_getpass)
    temp_feconf_path = tempfile.NamedTemporaryFile().name
    # Deliberately omits the 'MAILCHIMP_API_KEY = None' line.
    feconf_text = (
        'REDISHOST = \'192.13.2.1\'\n'
        '# When the site terms were last updated, in UTC.\n'
        'REGISTRATION_PAGE_LAST_UPDATED_UTC = '
        'datetime.datetime(2015, 10, 14, 2, 40, 0)\n'
        '# Format of string for dashboard statistics logs.\n'
        '# NOTE TO DEVELOPERS: This format should not be changed, '
        'since it is used in\n'
        '# the existing storage models for UserStatsModel.\n'
        'DASHBOARD_STATS_DATETIME_STRING_FORMAT = \'%Y-%m-%d\'\n')
    with python_utils.open_file(temp_feconf_path, 'w') as f:
        f.write(feconf_text)

    with getpass_swap, self.assertRaisesRegexp(
        AssertionError, 'Missing mailchimp API key'):
        update_configs.add_mailchimp_api_key(temp_feconf_path)
def test_detect_non_audio_file(self):
    """Test that filenames with extensions that don't match the audio are
    detected.
    """
    self.login(self.EDITOR_EMAIL)
    csrf_token = self.get_new_csrf_token()

    # Upload a PNG payload while claiming it is a flac audio file.
    with python_utils.open_file(
        os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb',
        encoding=None) as f:
        raw_audio = f.read()
    with self.accepted_audio_extensions_swap:
        response_dict = self.post_json(
            '%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
            {'filename': self.TEST_AUDIO_FILE_FLAC},
            csrf_token=csrf_token,
            expected_status_int=400,
            upload_files=(
                ('raw_audio_file', 'unused_filename', raw_audio),))
    self.logout()
    self.assertEqual(
        response_dict['error'], 'Audio not recognized as a flac file')
def get_release_summary_lines():
    """Returns the lines from release summary file. It checks whether
    incorrect email is present or ordering of sections is invalid. In
    either case, the user will be asked to update the release summary
    file and the lines will be re-read.

    Returns:
        list(str). List of lines in ../release_summary.md.
    """
    invalid_email_is_present = True
    ordering_is_invalid = True
    while invalid_email_is_present or ordering_is_invalid:
        # Use a context manager so the file handle is closed on every
        # pass through the loop; the previous code opened the file
        # without ever closing it, leaking one handle per iteration.
        with python_utils.open_file(
            constants.release_constants.RELEASE_SUMMARY_FILEPATH, 'r'
        ) as release_summary_file:
            release_summary_lines = release_summary_file.readlines()
        invalid_email_is_present = is_invalid_email_present(
            release_summary_lines)
        if invalid_email_is_present:
            common.ask_user_to_confirm(
                'The release summary file contains emails of the form: %s '
                'Please replace them with the correct emails. '
                '(See error messages above.)' % (
                    constants.release_constants.INVALID_EMAIL_SUFFIX))
        ordering_is_invalid = not (
            is_order_of_sections_valid(release_summary_lines))
        if ordering_is_invalid:
            common.ask_user_to_confirm(
                'Please fix the ordering in release summary file. '
                '(See error messages above.)')
        # Re-read on the next loop iteration after the user edits.
        if invalid_email_is_present or ordering_is_invalid:
            common.ask_user_to_confirm(
                'Please save the file: %s with all the changes that '
                'you have made.' % (
                    constants.release_constants.RELEASE_SUMMARY_FILEPATH))
    return release_summary_lines
def verify_feconf(release_feconf_path, verify_email_api_keys):
    """Verifies that feconf is updated correctly to include mailgun api
    key, mailchimp api key and redishost.

    Args:
        release_feconf_path: str. The path to feconf file in release
            directory.
        verify_email_api_keys: bool. Whether to verify both mailgun and
            mailchimp api keys.

    Raises:
        Exception. A required key is missing or still set to its
            placeholder value.
    """
    # Use a context manager so the file handle is closed promptly; the
    # previous code opened the file without ever closing it.
    with python_utils.open_file(release_feconf_path, 'r') as feconf_file:
        feconf_contents = feconf_file.read()
    if verify_email_api_keys and (
            'MAILGUN_API_KEY' not in feconf_contents or
            'MAILGUN_API_KEY = None' in feconf_contents):
        raise Exception('The mailgun API key must be added before deployment.')
    if verify_email_api_keys and (
            'MAILCHIMP_API_KEY' not in feconf_contents or
            'MAILCHIMP_API_KEY = None' in feconf_contents):
        raise Exception(
            'The mailchimp API key must be added before deployment.')
    if ('REDISHOST' not in feconf_contents or
            'REDISHOST = \'localhost\'' in feconf_contents):
        raise Exception('REDISHOST must be updated before deployment.')
def test_topic_creation(self):
    """A topic can be created when a valid thumbnail image is supplied."""
    self.login(self.CURRICULUM_ADMIN_EMAIL)
    csrf_token = self.get_new_csrf_token()
    payload = {
        'name': 'Topic name',
        'abbreviatedName': 'name-one',
        'description': 'Topic description',
        'filename': 'test_svg.svg',
        'thumbnailBgColor': '#C6DCDA',
        'url_fragment': 'name-one'
    }

    thumbnail_path = os.path.join(feconf.TESTS_DATA_DIR, 'test_svg.svg')
    with python_utils.open_file(thumbnail_path, 'rb', encoding=None) as f:
        raw_image = f.read()
    json_response = self.post_json(
        self.url, payload, csrf_token=csrf_token,
        upload_files=(('image', 'unused_filename', raw_image),))
    topic_id = json_response['topicId']
    self.assertEqual(len(topic_id), 12)
    self.assertIsNotNone(
        topic_fetchers.get_topic_by_id(topic_id, strict=False))
    self.logout()
def test_post_with_valid_images(self):
    """Test question creation with valid images."""
    self.login(self.CURRICULUM_ADMIN_EMAIL)
    csrf_token = self.get_new_csrf_token()
    filename = 'img.png'
    question_dict = self.question.to_dict()
    question_dict['id'] = None
    question_dict['version'] = 0
    content_html = (
        '<oppia-noninteractive-image filepath-with-value='
        '""img.png"" caption-with-value="""" '
        'alt-with-value=""Image""></oppia-noninteractive-image>')
    question_dict['question_state_data']['content']['html'] = content_html
    post_data = {
        'question_dict': question_dict,
        'skill_ids': [self.skill_id],
        'skill_difficulties': [0.6]
    }

    image_path = os.path.join(feconf.TESTS_DATA_DIR, 'img.png')
    with python_utils.open_file(image_path, 'rb', encoding=None) as f:
        raw_image = f.read()
    self.post_json(
        feconf.NEW_QUESTION_URL, post_data, csrf_token=csrf_token,
        upload_files=((filename, filename, raw_image),))

    all_models = question_models.QuestionModel.get_all()
    questions = [
        question_fetchers.get_question_from_model(model)
        for model in all_models
    ]
    # The setUp question plus the one created above.
    self.assertEqual(len(questions), 2)
    self.logout()
def test_non_matching_extensions_are_detected(self):
    """Test that filenames with extensions that don't match the audio are
    detected.
    """
    self.login(self.EDITOR_EMAIL)
    csrf_token = self.get_new_csrf_token()

    # The filename advertises mp3 while the uploaded payload is flac.
    mismatched_filename = 'test.mp3'
    flac_path = os.path.join(
        feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_FLAC)
    with python_utils.open_file(flac_path, 'rb', encoding=None) as f:
        raw_audio = f.read()
    response_dict = self.post_json(
        '%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
        {'filename': mismatched_filename},
        csrf_token=csrf_token,
        expected_status_int=400,
        upload_files=(('raw_audio_file', 'unused_filename', raw_audio),))
    self.logout()

    self.assertEqual(response_dict['status_code'], 400)
    self.assertEqual(
        response_dict['error'], 'Audio not recognized as a mp3 file')
def test_component_e2e_tests(self):
    """Tests that an e2e test is defined for all rich text components."""
    test_file = os.path.join(
        'extensions', 'rich_text_components', 'protractor.js')
    rich_text_components_dir = os.path.join(
        os.curdir, 'extensions', 'rich_text_components')
    actual_components = [
        name for name in os.listdir(rich_text_components_dir)
        if name != '__pycache__' and
        os.path.isdir(os.path.join(rich_text_components_dir, name))
    ]
    with python_utils.open_file(test_file, 'r') as f:
        text = f.read()

    # Replace all spaces and new lines with empty space so the constant
    # can be scanned as one token.
    text = re.sub(r' ', r'', text)
    text = re.sub(r'\n', r'', text)

    # Isolate the text inside the RICH_TEXT_COMPONENTS constant.
    beginning_sequence = 'varRICH_TEXT_COMPONENTS={'
    first_bracket_index = text.find(beginning_sequence)
    last_bracket_index = text.find('};')
    text_inside_constant = text[
        first_bracket_index + len(beginning_sequence):last_bracket_index
    ] + ','

    # Collect each 'ComponentName:' key from the comma-separated body.
    rte_components_with_test = []
    while text_inside_constant.find(',') != -1:
        rte_components_with_test.append(
            text_inside_constant[:text_inside_constant.find(':')])
        text_inside_constant = text_inside_constant[
            text_inside_constant.find(',') + 1:]

    self.assertEqual(set(actual_components), set(rte_components_with_test))
def test_upload_check_for_duration_sec_as_response(self):
    """Tests the file upload and trying to confirm the audio file
    duration_secs is accurate.
    """
    self.login(self.EDITOR_EMAIL)
    csrf_token = self.get_new_csrf_token()
    mp3_path = os.path.join(
        feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_MP3)
    with python_utils.open_file(mp3_path, 'rb', encoding=None) as f:
        raw_audio = f.read()
    response_dict = self.post_json(
        '%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
        {'filename': self.TEST_AUDIO_FILE_MP3},
        csrf_token=csrf_token,
        expected_status_int=200,
        upload_files=(('raw_audio_file', 'unused_filename', raw_audio),))
    self.logout()
    expected_value = {
        'filename': self.TEST_AUDIO_FILE_MP3,
        'duration_secs': 15.255510204081633
    }
    self.assertEqual(response_dict, expected_value)
def test_permissions_of_file(self):
    """recursive_chown/recursive_chmod should apply mode 0o744 and the
    caller's uid to every file and directory under the root.
    """
    root_temp_dir = tempfile.mkdtemp()
    temp_dirpath = tempfile.mkdtemp(dir=root_temp_dir)
    temp_file = tempfile.NamedTemporaryFile(dir=temp_dirpath)
    temp_file.name = 'temp_file'
    temp_file_path = os.path.join(temp_dirpath, 'temp_file')
    with python_utils.open_file(temp_file_path, 'w') as f:
        f.write('content')

    common.recursive_chown(root_temp_dir, os.getuid(), -1)
    common.recursive_chmod(root_temp_dir, 0o744)

    for root, directories, filenames in os.walk(root_temp_dir):
        for entry in directories + filenames:
            entry_path = os.path.join(root, entry)
            self.assertEqual(
                oct(stat.S_IMODE(os.stat(entry_path).st_mode)), '0o744')
            self.assertEqual(os.stat(entry_path).st_uid, os.getuid())

    temp_file.close()
    shutil.rmtree(root_temp_dir)
def test_inplace_replace_file(self):
    """inplace_replace_file should substitute the DEV_MODE line and
    target the backup file for deletion on success.
    """
    origin_file = os.path.join(
        'core', 'tests', 'data', 'inplace_replace_test.json')
    backup_file = os.path.join(
        'core', 'tests', 'data', 'inplace_replace_test.json.bak')
    expected_lines = [
        '{\n',
        ' "RANDMON1" : "randomValue1",\n',
        ' "312RANDOM" : "ValueRanDom2",\n',
        ' "DEV_MODE": true,\n',
        ' "RAN213DOM" : "raNdoVaLue3"\n',
        '}\n'
    ]

    def mock_remove(unused_file):
        return

    # Stub os.remove so the backup survives for the manual revert below,
    # while still asserting that it was targeted for deletion.
    remove_swap = self.swap_with_checks(
        os, 'remove', mock_remove, expected_args=[(backup_file,)])
    with remove_swap:
        common.inplace_replace_file(
            origin_file, '"DEV_MODE": .*', '"DEV_MODE": true,',
            expected_number_of_replacements=1)
    with python_utils.open_file(origin_file, 'r') as f:
        self.assertEqual(expected_lines, f.readlines())

    # Revert the file.
    os.remove(origin_file)
    shutil.move(backup_file, origin_file)
def test_topic_creation_with_invalid_image(self):
    """Uploading an oversized non-image thumbnail is rejected with 400."""
    self.login(self.CURRICULUM_ADMIN_EMAIL)
    csrf_token = self.get_new_csrf_token()
    payload = {
        'name': 'Topic name',
        'abbreviatedName': 'name-three',
        'description': 'Topic description',
        'filename': 'cafe.flac',
        'thumbnailBgColor': '#C6DCDA',
        'url_fragment': 'name-three'
    }

    flac_path = os.path.join(feconf.TESTS_DATA_DIR, 'cafe.flac')
    with python_utils.open_file(flac_path, 'rb', encoding=None) as f:
        raw_image = f.read()
    json_response = self.post_json(
        self.url, payload, csrf_token=csrf_token,
        upload_files=(('image', 'unused_filename', raw_image),),
        expected_status_int=400)
    self.assertEqual(
        json_response['error'],
        'Image exceeds file size limit of 100 KB.')
def test_invalid_extension_is_detected(self):
    """Test that invalid extensions are caught."""
    self.login(self.EDITOR_EMAIL)
    csrf_token = self.get_new_csrf_token()
    base_name = 'test'
    bad_extension = 'wav'
    supplied_filename = '%s.%s' % (base_name, bad_extension)

    mp3_path = os.path.join(
        feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_MP3)
    with python_utils.open_file(mp3_path, 'rb', encoding=None) as f:
        raw_audio = f.read()
    response_dict = self.post_json(
        '%s/0' % (self.AUDIO_UPLOAD_URL_PREFIX),
        {'filename': supplied_filename},
        csrf_token=csrf_token,
        expected_status_int=400,
        upload_files=(('raw_audio_file', 'unused_filename', raw_audio),))
    self.logout()

    self.assertEqual(response_dict['status_code'], 400)
    self.assertEqual(
        response_dict['error'],
        'Invalid filename extension: it should have '
        'one of the following extensions: %s'
        % list(feconf.ACCEPTED_AUDIO_EXTENSIONS.keys()))
def get_stanzas_from_lcov_file():
    """Get all stanzas from a lcov file. The lcov file gather all the
    frontend files that has tests and each one has the following structure:
    TN: test name
    SF: file path
    FNF: total functions
    FNH: functions covered
    LF: total lines
    LH: lines covered
    BRF: total branches
    BRH: branches covered
    end_of_record

    Returns:
        list(LcovStanzaRelevantLines). A list with all stanzas.
    """
    # Use a context manager so the lcov file handle is closed; the
    # previous code opened the file without ever closing it.
    with python_utils.open_file(LCOV_FILE_PATH, 'r') as f:
        lcov_items_list = f.read().split('end_of_record')
    # Skip the empty trailing chunks produced by the split.
    return [
        LcovStanzaRelevantLines(item)
        for item in lcov_items_list if item.strip('\n')
    ]