def test_update_version_in_config_files_updates_version(self):
    package_json_swap = self.swap(
        update_changelog_and_credits, 'PACKAGE_JSON_FILEPATH',
        MOCK_PACKAGE_JSON_PATH)
    package_json_content = utils.open_file(
        MOCK_PACKAGE_JSON_PATH, 'r').read()
    package_json_regex = re.compile('"version": ".*"')
    expected_package_json_content = package_json_regex.sub(
        '"version": "1.2.3"', package_json_content)

    feconf_swap = self.swap(common, 'FECONF_PATH', MOCK_FECONF_PATH)
    feconf_content = utils.open_file(MOCK_FECONF_PATH, 'r').read()
    feconf_regex = re.compile('OPPIA_VERSION = \'.*\'')
    expected_feconf_content = feconf_regex.sub(
        'OPPIA_VERSION = \'1.2.3\'', feconf_content)

    try:
        with contextlib.ExitStack() as stack:
            stack.enter_context(self.branch_name_swap)
            stack.enter_context(feconf_swap)
            stack.enter_context(package_json_swap)
            update_changelog_and_credits.update_version_in_config_files()

        updated_package_json_content = utils.open_file(
            MOCK_PACKAGE_JSON_PATH, 'r').read()
        updated_feconf_content = utils.open_file(
            MOCK_FECONF_PATH, 'r').read()
        self.assertEqual(
            updated_package_json_content, expected_package_json_content)
        self.assertEqual(updated_feconf_content, expected_feconf_content)
    finally:
        write_to_file(MOCK_PACKAGE_JSON_PATH, package_json_content)
        write_to_file(MOCK_FECONF_PATH, feconf_content)
def test_set_constants_to_default(self):
    mock_constants_path = 'mock_app_dev.yaml'
    mock_feconf_path = 'mock_app.yaml'
    constants_path_swap = self.swap(
        common, 'CONSTANTS_FILE_PATH', mock_constants_path)
    feconf_path_swap = self.swap(common, 'FECONF_PATH', mock_feconf_path)

    constants_temp_file = tempfile.NamedTemporaryFile()
    constants_temp_file.name = mock_constants_path
    with utils.open_file(mock_constants_path, 'w') as tmp:
        tmp.write('export = {\n')
        tmp.write('  "DEV_MODE": false,\n')
        tmp.write('  "EMULATOR_MODE": false,\n')
        tmp.write('};')

    feconf_temp_file = tempfile.NamedTemporaryFile()
    feconf_temp_file.name = mock_feconf_path
    with utils.open_file(mock_feconf_path, 'w') as tmp:
        tmp.write(u'ENABLE_MAINTENANCE_MODE = True')

    with constants_path_swap, feconf_path_swap:
        build.set_constants_to_default()

    with utils.open_file(mock_constants_path, 'r') as constants_file:
        self.assertEqual(
            constants_file.read(),
            'export = {\n'
            '  "DEV_MODE": true,\n'
            '  "EMULATOR_MODE": true,\n'
            '};')

    with utils.open_file(mock_feconf_path, 'r') as feconf_file:
        self.assertEqual(
            feconf_file.read(), 'ENABLE_MAINTENANCE_MODE = False')

    constants_temp_file.close()
    feconf_temp_file.close()
@contextlib.contextmanager
def inplace_replace_file_context(
    filename: str, regex_pattern: str, replacement_string: str
) -> Generator[None, None, None]:
    """Context manager in which the file's content is replaced according to
    the given regex pattern. This function should only be used with files
    that are processed line by line.

    Args:
        filename: str. The name of the file to be changed.
        regex_pattern: str. The pattern to check.
        replacement_string: str. The content to be replaced.

    Yields:
        None. Nothing.
    """
    backup_filename = '%s.bak' % filename
    regex = re.compile(regex_pattern)
    shutil.copyfile(filename, backup_filename)

    try:
        with utils.open_file(backup_filename, 'r') as f:
            new_contents = [regex.sub(replacement_string, line) for line in f]
        with utils.open_file(filename, 'w') as f:
            f.write(''.join(new_contents))
        yield
    finally:
        if os.path.isfile(filename) and os.path.isfile(backup_filename):
            os.remove(filename)
        if os.path.isfile(backup_filename):
            shutil.move(backup_filename, filename)
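# Illustrative usage sketch (not part of the original module): temporarily
# patch a version string while some work runs inside the block; the original
# contents are restored from the .bak copy when the block exits. The filename
# and pattern below are hypothetical examples.
def _example_inplace_replace_file_context_usage() -> None:
    with inplace_replace_file_context(
        'package.json', '"version": ".*"', '"version": "1.2.3"'):
        # The patched file is only visible inside this block.
        pass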
def update_sorted_file(filepath, new_list):
    """Updates the files AUTHORS and CONTRIBUTORS with a sorted list of
    new authors or contributors.

    Args:
        filepath: str. The path of the file to update.
        new_list: list(str). The list of new authors or contributors to
            add to the file.
    """
    file_lines = []
    with utils.open_file(filepath, 'r') as f:
        file_lines = f.readlines()

    # Find the last comment line; the sorted list of names follows it.
    for line in file_lines:
        if line.startswith('#'):
            last_comment_line = line

    # start_index is the index of line where list of authors/contributors
    # starts. The line with the last comment is followed by an empty line
    # and then the sorted list. So, the start_index is the index of
    # last_comment_line plus 2.
    start_index = file_lines.index(last_comment_line) + 2
    updated_list = list(set(new_list + file_lines[start_index:]))
    updated_list = sorted(updated_list, key=lambda s: s.lower())
    file_lines = file_lines[:start_index] + updated_list
    with utils.open_file(filepath, 'w') as f:
        for line in file_lines:
            f.write(line)
def update_developer_names(release_summary_lines):
    """Updates about-page.constants.ts file.

    Args:
        release_summary_lines: list(str). List of lines in
            ../release_summary.md.
    """
    print('Updating about-page file...')
    new_developer_names = get_new_contributors(
        release_summary_lines, return_only_names=True)

    with utils.open_file(
        ABOUT_PAGE_CONSTANTS_FILEPATH, 'r') as about_page_file:
        about_page_lines = about_page_file.readlines()
        start_index = about_page_lines.index(CREDITS_START_LINE) + 1
        end_index = about_page_lines[start_index:].index(
            CREDITS_END_LINE) + 1
        all_developer_names = about_page_lines[start_index:end_index]
        for name in new_developer_names:
            all_developer_names.append(
                '%s\'%s\',\n' % (CREDITS_INDENT, name))
        all_developer_names = sorted(
            list(set(all_developer_names)), key=lambda s: s.lower())

        about_page_lines[start_index:end_index] = all_developer_names

    with utils.open_file(
        ABOUT_PAGE_CONSTANTS_FILEPATH, 'w') as about_page_file:
        for line in about_page_lines:
            about_page_file.write(str(line))
    print('Updated about-page file!')
def test_app_yaml_verification_with_wildcard_header_present(self):
    mailgun_api_key = ('key-%s' % ('').join(['1'] * 32))
    mailchimp_api_key = ('%s-us18' % ('').join(['1'] * 32))
    temp_feconf_path = tempfile.NamedTemporaryFile().name
    temp_app_yaml_path = tempfile.NamedTemporaryFile().name
    feconf_text = (
        'MAILGUN_API_KEY = \'%s\'\n'
        'MAILCHIMP_API_KEY = \'%s\'\n'
        'REDISHOST = \'192.13.2.1\'\n'
        '# When the site terms were last updated, in UTC.\n'
        'REGISTRATION_PAGE_LAST_UPDATED_UTC = '
        'datetime.datetime(2015, 10, 14, 2, 40, 0)\n'
        '# Format of string for dashboard statistics logs.\n'
        '# NOTE TO DEVELOPERS: This format should not be changed, '
        'since it is used in\n'
        '# the existing storage models for UserStatsModel.\n'
        'DASHBOARD_STATS_DATETIME_STRING_FORMAT = \'YY-mm-dd\'\n'
        % (mailgun_api_key, mailchimp_api_key))
    with utils.open_file(temp_feconf_path, 'w') as f:
        f.write(feconf_text)

    app_yaml_text = (
        '- url: /assets\n'
        '  static_dir: assets\n'
        '  secure: always\n'
        '  http_headers:\n'
        '    Access-Control-Allow-Origin: "*"\n'
        '  expiration: "0"')
    with utils.open_file(temp_app_yaml_path, 'w') as f:
        f.write(app_yaml_text)

    with self.assertRaisesRegex(
        Exception,
        r'\'Access-Control-Allow-Origin: "\*"\' must be updated to '
        r'a specific origin before deployment.'):
        update_configs.verify_config_files(
            temp_feconf_path, temp_app_yaml_path, True)
def add_mailchimp_api_key(release_feconf_path):
    """Adds mailchimp api key to feconf config file.

    Args:
        release_feconf_path: str. The path to feconf file in release
            directory.
    """
    mailchimp_api_key = getpass.getpass(
        prompt=('Enter mailchimp api key from the release process doc.'))
    mailchimp_api_key = mailchimp_api_key.strip()

    while re.match('^[a-z0-9]{32}-us18$', mailchimp_api_key) is None:
        mailchimp_api_key = getpass.getpass(
            prompt=(
                'You have entered an invalid mailchimp api '
                'key: %s, please retry.' % mailchimp_api_key))
        mailchimp_api_key = mailchimp_api_key.strip()

    feconf_lines = []
    with utils.open_file(release_feconf_path, 'r') as f:
        feconf_lines = f.readlines()

    error_text = 'Missing mailchimp API key'
    assert 'MAILCHIMP_API_KEY = None\n' in feconf_lines, error_text

    with utils.open_file(release_feconf_path, 'w') as f:
        for line in feconf_lines:
            if line == 'MAILCHIMP_API_KEY = None\n':
                line = line.replace('None', '\'%s\'' % mailchimp_api_key)
            f.write(line)
def test_process_html(self):
    """Test process_html removes whitespaces."""
    base_html_source_path = (
        os.path.join(MOCK_TEMPLATES_DEV_DIR, 'base.html'))

    build._ensure_files_exist([base_html_source_path])  # pylint: disable=protected-access

    minified_html_file_stream = io.StringIO()

    # Assert that base.html has white spaces and has original filepaths.
    with utils.open_file(base_html_source_path, 'r') as source_base_file:
        source_base_file_content = source_base_file.read()
        self.assertRegex(
            source_base_file_content, r'\s{2,}',
            msg='No white spaces detected in %s unexpectedly'
            % base_html_source_path)

    # Build base.html file.
    with utils.open_file(base_html_source_path, 'r') as source_base_file:
        build.process_html(source_base_file, minified_html_file_stream)

    minified_html_file_content = minified_html_file_stream.getvalue()
    self.assertNotRegex(
        minified_html_file_content, r'\s{2,}',
        msg='All white spaces must be removed from %s'
        % base_html_source_path)
def test_invalid_mailchimp_api_key(self):
    check_prompts = {
        'Enter mailchimp api key from the release process doc.': False,
        'You have entered an invalid mailchimp api key: invalid, '
        'please retry.': False
    }
    expected_check_prompts = {
        'Enter mailchimp api key from the release process doc.': True,
        'You have entered an invalid mailchimp api key: invalid, '
        'please retry.': True
    }
    mailchimp_api_key = ('%s-us18' % ('').join(['1'] * 32))

    def mock_getpass(prompt):
        check_prompts[prompt] = True
        if 'invalid' in prompt:
            return mailchimp_api_key
        return 'invalid'

    getpass_swap = self.swap(getpass, 'getpass', mock_getpass)

    temp_feconf_path = tempfile.NamedTemporaryFile().name
    feconf_text = (
        'REDISHOST = \'192.13.2.1\'\n'
        'MAILGUN_API_KEY = None\n'
        'MAILCHIMP_API_KEY = None\n'
        '# When the site terms were last updated, in UTC.\n'
        'REGISTRATION_PAGE_LAST_UPDATED_UTC = '
        'datetime.datetime(2015, 10, 14, 2, 40, 0)\n'
        '# Format of string for dashboard statistics logs.\n'
        '# NOTE TO DEVELOPERS: This format should not be changed, '
        'since it is used in\n'
        '# the existing storage models for UserStatsModel.\n'
        'DASHBOARD_STATS_DATETIME_STRING_FORMAT = \'YY-mm-dd\'\n')
    expected_feconf_text = (
        'REDISHOST = \'192.13.2.1\'\n'
        'MAILGUN_API_KEY = None\n'
        'MAILCHIMP_API_KEY = \'%s\'\n'
        '# When the site terms were last updated, in UTC.\n'
        'REGISTRATION_PAGE_LAST_UPDATED_UTC = '
        'datetime.datetime(2015, 10, 14, 2, 40, 0)\n'
        '# Format of string for dashboard statistics logs.\n'
        '# NOTE TO DEVELOPERS: This format should not be changed, '
        'since it is used in\n'
        '# the existing storage models for UserStatsModel.\n'
        'DASHBOARD_STATS_DATETIME_STRING_FORMAT = \'YY-mm-dd\'\n'
        % (mailchimp_api_key))
    with utils.open_file(temp_feconf_path, 'w') as f:
        f.write(feconf_text)

    with getpass_swap:
        update_configs.add_mailchimp_api_key(temp_feconf_path)

    self.assertEqual(check_prompts, expected_check_prompts)
    with utils.open_file(temp_feconf_path, 'r') as f:
        self.assertEqual(f.read(), expected_feconf_text)
def setUp(self) -> None:
    super(ImageServicesUnitTests, self).setUp()
    with utils.open_file(
        os.path.join(feconf.TESTS_DATA_DIR, 'dummy_large_image.jpg'),
        'rb', encoding=None) as f:
        self.jpeg_raw_image = f.read()
    with utils.open_file(
        os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
        'rb', encoding=None) as f:
        self.png_raw_image = f.read()
def inplace_replace_file(
    filename: str,
    regex_pattern: str,
    replacement_string: str,
    expected_number_of_replacements: Optional[int] = None
) -> None:
    """Replace the file content in-place with regex pattern. The pattern is
    used to replace the file's content line by line.

    Note: This function should only be used with files that are processed
    line by line.

    Args:
        filename: str. The name of the file to be changed.
        regex_pattern: str. The pattern to check.
        replacement_string: str. The content to be replaced.
        expected_number_of_replacements: optional(int). The number of
            replacements that should be made. When None no check is done.

    Raises:
        ValueError. Wrong number of replacements.
        Exception. The content failed to get replaced.
    """
    backup_filename = '%s.bak' % filename
    shutil.copyfile(filename, backup_filename)
    new_contents = []
    total_number_of_replacements = 0
    try:
        regex = re.compile(regex_pattern)
        with utils.open_file(backup_filename, 'r') as f:
            for line in f:
                new_line, number_of_replacements = regex.subn(
                    replacement_string, line)
                new_contents.append(new_line)
                total_number_of_replacements += number_of_replacements

        with utils.open_file(filename, 'w') as f:
            for line in new_contents:
                f.write(line)

        if (
            expected_number_of_replacements is not None and
            total_number_of_replacements != expected_number_of_replacements
        ):
            raise ValueError(
                'Wrong number of replacements. Expected %s. Performed %s.'
                % (
                    expected_number_of_replacements,
                    total_number_of_replacements))

        os.remove(backup_filename)
    except Exception:
        # Restore the content if there was an error.
        os.remove(filename)
        shutil.move(backup_filename, filename)
        raise
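# Illustrative usage sketch (not part of the original module): bump the
# OPPIA_VERSION constant in a feconf-style file and fail if the pattern does
# not match exactly once. The filename and pattern mirror the ones used in
# the surrounding tests and are only examples.
def _example_inplace_replace_file_usage() -> None:
    inplace_replace_file(
        'feconf.py',
        'OPPIA_VERSION = \'.*\'',
        'OPPIA_VERSION = \'1.2.3\'',
        expected_number_of_replacements=1)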
def test_get_refs(self):
    temp_stdin_file = tempfile.NamedTemporaryFile().name
    with utils.open_file(temp_stdin_file, 'w') as f:
        f.write('local_ref local_sha1 remote_ref remote_sha1')
    with utils.open_file(temp_stdin_file, 'r') as f:
        with self.swap(sys, 'stdin', f):
            self.assertEqual(
                pre_push_hook.get_refs(),
                [
                    pre_push_hook.GitRef(
                        local_ref='local_ref', local_sha1='local_sha1',
                        remote_ref='remote_ref', remote_sha1='remote_sha1'
                    )
                ])
def test_update_developer_names(self):
    with utils.open_file(
        update_changelog_and_credits.ABOUT_PAGE_CONSTANTS_FILEPATH, 'r'
    ) as f:
        about_page_lines = f.readlines()
        start_index = about_page_lines.index(
            update_changelog_and_credits.CREDITS_START_LINE) + 1
        end_index = about_page_lines[start_index:].index(
            update_changelog_and_credits.CREDITS_END_LINE) + 1
        existing_developer_names = about_page_lines[start_index:end_index]

    tmp_file = tempfile.NamedTemporaryFile()
    tmp_file.name = MOCK_ABOUT_PAGE_CONSTANTS_FILEPATH
    with utils.open_file(
        MOCK_ABOUT_PAGE_CONSTANTS_FILEPATH, 'w'
    ) as f:
        for line in about_page_lines:
            f.write(str(line))

    release_summary_lines = read_from_file(MOCK_RELEASE_SUMMARY_FILEPATH)

    new_developer_names = update_changelog_and_credits.get_new_contributors(
        release_summary_lines, return_only_names=True)

    expected_developer_names = existing_developer_names
    for name in new_developer_names:
        expected_developer_names.append('%s\'%s\',\n' % (
            update_changelog_and_credits.CREDITS_INDENT, name))
    expected_developer_names = sorted(
        list(set(expected_developer_names)), key=lambda s: s.lower())

    with self.swap(
        update_changelog_and_credits, 'ABOUT_PAGE_CONSTANTS_FILEPATH',
        MOCK_ABOUT_PAGE_CONSTANTS_FILEPATH):
        update_changelog_and_credits.update_developer_names(
            release_summary_lines)

    with utils.open_file(tmp_file.name, 'r') as f:
        about_page_lines = f.readlines()
        start_index = about_page_lines.index(
            update_changelog_and_credits.CREDITS_START_LINE) + 1
        end_index = about_page_lines[start_index:].index(
            update_changelog_and_credits.CREDITS_END_LINE) + 1
        actual_developer_names = about_page_lines[start_index:end_index]

        self.assertEqual(actual_developer_names, expected_developer_names)

    tmp_file.close()
    if os.path.isfile(MOCK_ABOUT_PAGE_CONSTANTS_FILEPATH):
        # Occasionally this temp file is not deleted.
        os.remove(MOCK_ABOUT_PAGE_CONSTANTS_FILEPATH)
def update_changelog(
        branch_name, release_summary_lines, current_release_version_number):
    """Updates CHANGELOG file.

    Args:
        branch_name: str. The name of the current branch.
        release_summary_lines: list(str). List of lines in
            ../release_summary.md.
        current_release_version_number: str. The version of current release.
    """
    print('Updating Changelog...')
    start_index = release_summary_lines.index(
        constants.release_constants.CHANGELOG_HEADER) + 1
    end_index = release_summary_lines.index(
        constants.release_constants.COMMIT_HISTORY_HEADER)
    release_version_changelog = [
        u'v%s (%s)\n' % (current_release_version_number, CURRENT_DATE),
        u'------------------------\n'] + release_summary_lines[
            start_index:end_index]
    changelog_lines = []
    with utils.open_file(CHANGELOG_FILEPATH, 'r') as changelog_file:
        changelog_lines = changelog_file.readlines()

    if constants.release_constants.BRANCH_TYPE_HOTFIX in branch_name:
        previous_release_version = get_previous_release_version(
            constants.release_constants.BRANCH_TYPE_HOTFIX,
            current_release_version_number)
        changelog_lines = remove_repetition_from_changelog(
            current_release_version_number, previous_release_version,
            changelog_lines)
    else:
        previous_release_version = get_previous_release_version(
            constants.release_constants.BRANCH_TYPE_RELEASE,
            current_release_version_number)
        # Update only if changelog is generated before and contains info for
        # current version.
        if any(
                line.startswith(
                    'v%s' % current_release_version_number
                ) for line in changelog_lines):
            changelog_lines = remove_repetition_from_changelog(
                current_release_version_number, previous_release_version,
                changelog_lines)

    changelog_lines[2:2] = release_version_changelog
    with utils.open_file(CHANGELOG_FILEPATH, 'w') as changelog_file:
        for line in changelog_lines:
            changelog_file.write(line)
    print('Updated Changelog!')
def apply_changes_based_on_config(
        local_filepath, config_filepath, expected_config_line_regex):
    """Updates the local file based on the deployment configuration specified
    in the config file.

    Each line of the config file should match the expected config line regex.

    Args:
        local_filepath: str. Absolute path of the local file to be modified.
        config_filepath: str. Absolute path of the config file to use.
        expected_config_line_regex: str. The regex to use to verify each line
            of the config file. It should have a single group, which
            corresponds to the prefix to extract.

    Raises:
        Exception. Line(s) in config file are not matching with the regex.
    """
    with utils.open_file(config_filepath, 'r') as config_file:
        config_lines = config_file.read().splitlines()

    with utils.open_file(local_filepath, 'r') as local_file:
        local_lines = local_file.read().splitlines()

    local_filename = os.path.basename(local_filepath)
    config_filename = os.path.basename(config_filepath)

    # First, verify the config file.
    local_line_numbers = []
    for config_line in config_lines:
        match_result = re.match(expected_config_line_regex, config_line)
        if match_result is None:
            raise Exception(
                'Invalid line in %s config file: %s'
                % (config_filename, config_line))

        matching_local_line_numbers = [
            line_number for (line_number, line) in enumerate(local_lines)
            if line.startswith(match_result.group(1))
        ]
        assert len(matching_local_line_numbers) == 1, (
            'Could not find correct number of lines in %s matching: %s'
            % (local_filename, config_line))
        local_line_numbers.append(matching_local_line_numbers[0])

    # Then, apply the changes.
    for index, config_line in enumerate(config_lines):
        local_lines[local_line_numbers[index]] = config_line

    with utils.open_file(local_filepath, 'w') as writable_local_file:
        writable_local_file.write('\n'.join(local_lines) + '\n')
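# Illustrative usage sketch (not part of the original module): apply a
# deployment config in which every line has the form "SOME_CONSTANT = value",
# with the regex's single capturing group matching the "SOME_CONSTANT = "
# prefix that is used to locate the corresponding line in the local file.
# The paths and the regex below are hypothetical examples, not the module's
# actual constants.
def _example_apply_changes_based_on_config_usage() -> None:
    apply_changes_based_on_config(
        '/tmp/feconf.py',
        '/tmp/feconf_updates.config',
        r'^(([A-Za-z]|_)+ = ).*$')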
def test_changes_are_applied_to_config(self):
    with utils.open_file(MOCK_LOCAL_FECONF_PATH, 'r') as f:
        original_text = f.read()
    expected_text = original_text.replace(
        'INCOMING_EMAILS_DOMAIN_NAME = \'\'',
        'INCOMING_EMAILS_DOMAIN_NAME = \'oppia.org\'')
    try:
        update_configs.apply_changes_based_on_config(
            MOCK_LOCAL_FECONF_PATH, VALID_FECONF_CONFIG_PATH,
            update_configs.FECONF_REGEX)
        with utils.open_file(MOCK_LOCAL_FECONF_PATH, 'r') as f:
            self.assertEqual(f.read(), expected_text)
    finally:
        with utils.open_file(MOCK_LOCAL_FECONF_PATH, 'w') as f:
            f.write(original_text)
def test_save_original_and_compressed_versions_of_svg_image(self) -> None:
    with utils.open_file(
        os.path.join(feconf.TESTS_DATA_DIR, 'test_svg.svg'),
        'rb', encoding=None) as f:
        image_content = f.read()

    with self.swap(constants, 'DEV_MODE', False):
        fs = fs_services.GcsFileSystem(
            feconf.ENTITY_TYPE_EXPLORATION, self.EXPLORATION_ID)
        self.assertFalse(fs.isfile('image/%s' % self.FILENAME))
        self.assertFalse(
            fs.isfile('image/%s' % self.COMPRESSED_IMAGE_FILENAME))
        self.assertFalse(fs.isfile('image/%s' % self.MICRO_IMAGE_FILENAME))

        fs_services.save_original_and_compressed_versions_of_image(
            self.FILENAME, 'exploration', self.EXPLORATION_ID,
            image_content, 'image', False)

        self.assertTrue(fs.isfile('image/%s' % self.FILENAME))
        self.assertTrue(
            fs.isfile('image/%s' % self.COMPRESSED_IMAGE_FILENAME))
        self.assertTrue(fs.isfile('image/%s' % self.MICRO_IMAGE_FILENAME))

        original_image_content = fs.get('image/%s' % self.FILENAME)
        compressed_image_content = fs.get(
            'image/%s' % self.COMPRESSED_IMAGE_FILENAME)
        micro_image_content = fs.get('image/%s' % self.MICRO_IMAGE_FILENAME)

        self.assertEqual(original_image_content, image_content)
        self.assertEqual(compressed_image_content, image_content)
        self.assertEqual(micro_image_content, image_content)
def test_inplace_replace_file(self) -> None:
    origin_file = os.path.join(
        'core', 'tests', 'data', 'inplace_replace_test.json')
    backup_file = os.path.join(
        'core', 'tests', 'data', 'inplace_replace_test.json.bak')
    expected_lines = [
        '{\n',
        '    "RANDMON1" : "randomValue1",\n',
        '    "312RANDOM" : "ValueRanDom2",\n',
        '    "DEV_MODE": true,\n',
        '    "RAN213DOM" : "raNdoVaLue3"\n',
        '}\n'
    ]

    def mock_remove(unused_file: str) -> None:
        return

    remove_swap = self.swap_with_checks(
        os, 'remove', mock_remove, expected_args=[(backup_file,)])
    with remove_swap:
        common.inplace_replace_file(
            origin_file, '"DEV_MODE": .*', '"DEV_MODE": true,',
            expected_number_of_replacements=1)

    with utils.open_file(origin_file, 'r') as f:
        self.assertEqual(expected_lines, f.readlines())

    # Revert the file.
    os.remove(origin_file)
    shutil.move(backup_file, origin_file)
def test_permissions_of_file(self) -> None:
    root_temp_dir = tempfile.mkdtemp()
    temp_dirpath = tempfile.mkdtemp(dir=root_temp_dir)
    temp_file = tempfile.NamedTemporaryFile(dir=temp_dirpath)
    # Here MyPy assumes that the 'name' attribute is read-only. In order to
    # silence the MyPy complaints `setattr` is used to set the attribute.
    setattr(temp_file, 'name', 'temp_file')
    temp_file_path = os.path.join(temp_dirpath, 'temp_file')
    with utils.open_file(temp_file_path, 'w') as f:
        f.write('content')

    common.recursive_chown(root_temp_dir, os.getuid(), -1)
    common.recursive_chmod(root_temp_dir, 0o744)

    for root, directories, filenames in os.walk(root_temp_dir):
        for directory in directories:
            self.assertEqual(
                oct(
                    stat.S_IMODE(
                        os.stat(os.path.join(root, directory)).st_mode)),
                '0o744')
            self.assertEqual(
                os.stat(os.path.join(root, directory)).st_uid, os.getuid())

        for filename in filenames:
            self.assertEqual(
                oct(
                    stat.S_IMODE(
                        os.stat(os.path.join(root, filename)).st_mode)),
                '0o744')
            self.assertEqual(
                os.stat(os.path.join(root, filename)).st_uid, os.getuid())

    temp_file.close()
    shutil.rmtree(root_temp_dir)
def test_copy_images(self):
    with utils.open_file(
        os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
        'rb', encoding=None) as f:
        original_image_content = f.read()
    fs_services.save_original_and_compressed_versions_of_image(
        self.FILENAME, 'exploration', self.EXPLORATION_ID,
        original_image_content, 'image', True)
    destination_fs = fs_domain.AbstractFileSystem(
        fs_domain.GcsFileSystem(
            feconf.ENTITY_TYPE_QUESTION, 'question_id1'))
    self.assertFalse(destination_fs.isfile('image/%s' % self.FILENAME))
    self.assertFalse(
        destination_fs.isfile(
            'image/%s' % self.COMPRESSED_IMAGE_FILENAME))
    self.assertFalse(
        destination_fs.isfile('image/%s' % self.MICRO_IMAGE_FILENAME))

    fs_services.copy_images(
        feconf.ENTITY_TYPE_EXPLORATION, self.EXPLORATION_ID,
        feconf.ENTITY_TYPE_QUESTION, 'question_id1', ['image.png'])

    self.assertTrue(destination_fs.isfile('image/%s' % self.FILENAME))
    self.assertTrue(
        destination_fs.isfile(
            'image/%s' % self.COMPRESSED_IMAGE_FILENAME))
    self.assertTrue(
        destination_fs.isfile('image/%s' % self.MICRO_IMAGE_FILENAME))
def test_story_creation_fails_with_duplicate_story_url_fragment(self):
    self.login(self.CURRICULUM_ADMIN_EMAIL)
    csrf_token = self.get_new_csrf_token()
    payload = {
        'title': 'Story title',
        'description': 'Story Description',
        'filename': 'test_svg.svg',
        'thumbnailBgColor': '#F8BF74',
        'story_url_fragment': 'original'
    }
    self.save_new_story(
        story_services.get_new_story_id(), self.admin_id,
        topic_fetchers.get_new_topic_id(), title='title',
        description='description', notes='note', url_fragment='original')

    with utils.open_file(
        os.path.join(feconf.TESTS_DATA_DIR, 'test_svg.svg'),
        'rb', encoding=None) as f:
        raw_image = f.read()
    json_response = self.post_json(
        '%s/%s' % (feconf.TOPIC_EDITOR_STORY_URL, self.topic_id),
        payload, csrf_token=csrf_token,
        upload_files=(('image', 'unused_filename', raw_image),),
        expected_status_int=400)
    self.assertEqual(
        json_response['error'],
        'Story url fragment is not unique across the site.')
def test_story_creation_fails_with_invalid_image(self):
    self.login(self.CURRICULUM_ADMIN_EMAIL)
    csrf_token = self.get_new_csrf_token()
    payload = {
        'title': 'Story title',
        'description': 'Story Description',
        'filename': 'cafe.flac',
        'thumbnailBgColor': '#F8BF74',
        'story_url_fragment': 'story-frag-two'
    }

    with utils.open_file(
        os.path.join(feconf.TESTS_DATA_DIR, 'cafe.flac'),
        'rb', encoding=None) as f:
        raw_image = f.read()
    json_response = self.post_json(
        '%s/%s' % (feconf.TOPIC_EDITOR_STORY_URL, self.topic_id),
        payload, csrf_token=csrf_token,
        upload_files=(('image', 'unused_filename', raw_image),),
        expected_status_int=400)
    self.assertEqual(
        json_response['error'], 'Image exceeds file size limit of 100 KB.')
def test_story_creation_with_invalid_description(self):
    self.login(self.CURRICULUM_ADMIN_EMAIL)
    csrf_token = self.get_new_csrf_token()
    payload = {
        'title': 'Story title',
        'description': 'Story Description' * 60,
        'filename': 'test_svg.svg',
        'thumbnailBgColor': '#F8BF74',
        'story_url_fragment': 'story-frag-one'
    }

    with utils.open_file(
        os.path.join(feconf.TESTS_DATA_DIR, 'test_svg.svg'),
        'rb', encoding=None) as f:
        raw_image = f.read()
    json_response = self.post_json(
        '%s/%s' % (feconf.TOPIC_EDITOR_STORY_URL, self.topic_id),
        payload, csrf_token=csrf_token,
        upload_files=(('image', 'unused_filename', raw_image),),
        expected_status_int=400)

    invalid_description = 'Story Description' * 60
    self.assertEqual(
        json_response['error'],
        'Schema validation for \'description\' failed: '
        'Validation failed: has_length_at_most '
        f'({{\'max_value\': {constants.MAX_CHARS_IN_STORY_DESCRIPTION}}}) '
        f'for object {invalid_description}')

    self.logout()
def test_story_creation_with_valid_description(self):
    self.login(self.CURRICULUM_ADMIN_EMAIL)
    csrf_token = self.get_new_csrf_token()
    payload = {
        'title': 'Story title',
        'description': 'Story Description',
        'filename': 'test_svg.svg',
        'thumbnailBgColor': '#F8BF74',
        'story_url_fragment': 'story-frag-one'
    }

    with utils.open_file(
        os.path.join(feconf.TESTS_DATA_DIR, 'test_svg.svg'),
        'rb', encoding=None) as f:
        raw_image = f.read()
    json_response = self.post_json(
        '%s/%s' % (feconf.TOPIC_EDITOR_STORY_URL, self.topic_id),
        payload, csrf_token=csrf_token,
        upload_files=(('image', 'unused_filename', raw_image),))
    story_id = json_response['storyId']
    self.assertEqual(len(story_id), 12)
    self.assertIsNotNone(
        story_fetchers.get_story_by_id(story_id, strict=False))
    self.logout()
def test_permissions_of_file(self):
    root_temp_dir = tempfile.mkdtemp()
    temp_dirpath = tempfile.mkdtemp(dir=root_temp_dir)
    temp_file = tempfile.NamedTemporaryFile(dir=temp_dirpath)
    temp_file.name = 'temp_file'
    temp_file_path = os.path.join(temp_dirpath, 'temp_file')
    with utils.open_file(temp_file_path, 'w') as f:
        f.write('content')

    common.recursive_chown(root_temp_dir, os.getuid(), -1)
    common.recursive_chmod(root_temp_dir, 0o744)

    for root, directories, filenames in os.walk(root_temp_dir):
        for directory in directories:
            self.assertEqual(
                oct(
                    stat.S_IMODE(
                        os.stat(os.path.join(root, directory)).st_mode)),
                '0o744')
            self.assertEqual(
                os.stat(os.path.join(root, directory)).st_uid, os.getuid())

        for filename in filenames:
            self.assertEqual(
                oct(
                    stat.S_IMODE(
                        os.stat(os.path.join(root, filename)).st_mode)),
                '0o744')
            self.assertEqual(
                os.stat(os.path.join(root, filename)).st_uid, os.getuid())

    temp_file.close()
    shutil.rmtree(root_temp_dir)
def check_filenames_in_tsconfig_strict_are_sorted(self):
    """Checks if the files in strict TS config are sorted alphabetically.

    Returns:
        TaskResult. A TaskResult object representing the result of the lint
        check.
    """
    name = 'Sorted strict TS config'
    failed = False
    error_messages = []
    with utils.open_file(STRICT_TS_CONFIG_FILEPATH, 'r') as f:
        strict_ts_config = json.load(f)

    # Remove .ts extension from filepath for sorting to ensure that
    # spec files are always below the main files.
    files = [path[:-3] for path in strict_ts_config['files']]
    sorted_files = sorted(files)

    if files != sorted_files:
        failed = True
        error_message = (
            'Files in %s are not alphabetically sorted.'
            % STRICT_TS_CONFIG_FILE_NAME)
        error_messages.append(error_message)

    return concurrent_task_utils.TaskResult(
        name, failed, error_messages, error_messages)
def test_upload_check_for_duration_sec_as_response(self):
    """Tests that the audio file upload response reports an accurate
    duration_secs value.
    """
    self.login(self.EDITOR_EMAIL)
    csrf_token = self.get_new_csrf_token()

    with utils.open_file(
        os.path.join(feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_MP3),
        'rb', encoding=None) as f:
        raw_audio = f.read()
    response_dict = self.post_json(
        '%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
        {'filename': self.TEST_AUDIO_FILE_MP3},
        csrf_token=csrf_token,
        expected_status_int=200,
        upload_files=(('raw_audio_file', 'unused_filename', raw_audio),))
    self.logout()
    expected_value = ({
        'filename': self.TEST_AUDIO_FILE_MP3,
        'duration_secs': 15.255510204081633
    })
    self.assertEqual(response_dict, expected_value)
def test_non_matching_extensions_are_detected(self):
    """Test that filenames with extensions that don't match the audio are
    detected.
    """
    self.login(self.EDITOR_EMAIL)
    csrf_token = self.get_new_csrf_token()

    # Use an accepted audio extension in mismatched_filename
    # that differs from the uploaded file's audio type.
    mismatched_filename = 'test.mp3'
    with utils.open_file(
        os.path.join(feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_FLAC),
        'rb', encoding=None) as f:
        raw_audio = f.read()
    response_dict = self.post_json(
        '%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
        {'filename': mismatched_filename},
        csrf_token=csrf_token,
        expected_status_int=400,
        upload_files=(('raw_audio_file', 'unused_filename', raw_audio),))
    self.logout()
    self.assertEqual(response_dict['status_code'], 400)
    self.assertEqual(
        response_dict['error'], 'Audio not recognized as a mp3 file')
def test_invalid_extension_is_detected(self):
    """Test that invalid extensions are caught."""
    self.login(self.EDITOR_EMAIL)
    csrf_token = self.get_new_csrf_token()

    filename_without_extension = 'test'
    invalid_extension = 'wav'
    supplied_filename = (
        '%s.%s' % (filename_without_extension, invalid_extension))

    with utils.open_file(
        os.path.join(feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_MP3),
        'rb', encoding=None) as f:
        raw_audio = f.read()
    response_dict = self.post_json(
        '%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
        {'filename': supplied_filename},
        csrf_token=csrf_token,
        expected_status_int=400,
        upload_files=(('raw_audio_file', 'unused_filename', raw_audio),))
    self.logout()
    self.assertEqual(response_dict['status_code'], 400)
    self.assertEqual(
        response_dict['error'],
        'Invalid filename extension: it should have '
        'one of the following extensions: %s'
        % list(feconf.ACCEPTED_AUDIO_EXTENSIONS.keys()))
def test_detect_non_matching_extensions(self):
    self.login(self.EDITOR_EMAIL)
    csrf_token = self.get_new_csrf_token()

    # Use an accepted audio extension in mismatched_filename
    # that differs from the uploaded file's audio type.
    mismatched_filename = 'test.flac'
    with utils.open_file(
        os.path.join(feconf.TESTS_DATA_DIR, self.TEST_AUDIO_FILE_MP3),
        'rb', encoding=None) as f:
        raw_audio = f.read()
    with self.accepted_audio_extensions_swap:
        response_dict = self.post_json(
            '%s/0' % self.AUDIO_UPLOAD_URL_PREFIX,
            {'filename': mismatched_filename},
            csrf_token=csrf_token,
            expected_status_int=400,
            upload_files=[
                ('raw_audio_file', mismatched_filename, raw_audio)])
    self.logout()
    self.assertIn(
        'Although the filename extension indicates the file is a flac '
        'file, it was not recognized as one. Found mime types:',
        response_dict['error'])