def main():
    """Collects necessary info and dumps it to disk."""
    branch_name = common.get_current_branch_name()
    if not re.match(r'release-\d+\.\d+\.\d+$', branch_name):
        raise Exception(
            'This script should only be run from the latest release branch.')

    if not os.path.exists(feconf.RELEASE_SUMMARY_FILEPATH):
        raise Exception('Release summary file %s is missing. Please run the '
                        'release_info.py script and re-run this script.' %
                        (feconf.RELEASE_SUMMARY_FILEPATH))

    parsed_args = _PARSER.parse_args()
    if parsed_args.github_username is None:
        raise Exception(
            'No GitHub username provided. Please re-run the '
            'script specifying a username using --username=<Your username>')
    github_username = parsed_args.github_username

    personal_access_token = getpass.getpass(
        prompt=('Please provide personal access token for your github ID. '
                'You can create one at https://github.com/settings/tokens: '))

    if personal_access_token is None:
        raise Exception(
            'No personal access token provided, please set up a personal '
            'access token at https://github.com/settings/tokens and re-run '
            'the script')
    g = github.Github(personal_access_token)
    repo_fork = g.get_repo('%s/oppia' % github_username)

    current_release_version = branch_name[len(common.RELEASE_BRANCH_NAME_PREFIX
                                              ):]
    target_branch = 'update-changelog-for-releasev%s' % current_release_version

    remove_updates_and_delete_branch(repo_fork, target_branch)

    message = ('Please update %s to:\n- have a correct changelog for '
               'updating the CHANGELOG file\n- have a correct list of new '
               'authors and contributors to update AUTHORS, CONTRIBUTORS '
               'and developer_names section in about-page.directive.html\n' %
               (feconf.RELEASE_SUMMARY_FILEPATH))
    ask_user_to_confirm(message)

    release_summary_lines = []
    with python_utils.open_file(feconf.RELEASE_SUMMARY_FILEPATH,
                                'r') as release_summary_file:
        release_summary_lines = release_summary_file.readlines()

    check_ordering_of_sections(release_summary_lines)

    update_changelog(release_summary_lines, current_release_version)
    update_authors(release_summary_lines)
    update_contributors(release_summary_lines)
    update_developer_names(release_summary_lines)

    message = ('Please check the changes and make updates if required in the '
               'following files:\n1. %s\n2. %s\n3. %s\n4. %s\n' %
               (CHANGELOG_FILEPATH, AUTHORS_FILEPATH, CONTRIBUTORS_FILEPATH,
                ABOUT_PAGE_FILEPATH))
    ask_user_to_confirm(message)

    create_branch(repo_fork, target_branch, github_username)
def mock_read_protractor_conf_file():
    # Use a `with` block so the file handle is closed after reading.
    with python_utils.open_file(
        os.path.join(DUMMY_CONF_FILES, 'dummy_protractor.conf.js'),
        'r') as f:
        protractor_config_file = f.read()
    return protractor_config_file
def main():
    """Install third-party libraries for Oppia."""
    setup.main(args=[])
    setup_gae.main(args=[])
    pip_dependencies = [
        ('coverage', common.COVERAGE_VERSION, common.OPPIA_TOOLS_DIR),
        ('pylint', '1.9.4', common.OPPIA_TOOLS_DIR),
        ('Pillow', '6.0.0', common.OPPIA_TOOLS_DIR),
        ('pylint-quotes', '0.1.8', common.OPPIA_TOOLS_DIR),
        ('webtest', '2.0.33', common.OPPIA_TOOLS_DIR),
        ('isort', '4.3.20', common.OPPIA_TOOLS_DIR),
        ('pycodestyle', '2.5.0', common.OPPIA_TOOLS_DIR),
        ('esprima', '4.0.1', common.OPPIA_TOOLS_DIR),
        ('browsermob-proxy', '0.8.0', common.OPPIA_TOOLS_DIR),
        ('selenium', '3.13.0', common.OPPIA_TOOLS_DIR),
        ('PyGithub', '1.43.7', common.OPPIA_TOOLS_DIR),
    ]

    for package, version, path in pip_dependencies:
        ensure_pip_library_is_installed(package, version, path)

    # Do a little surgery on configparser in pylint-1.9.4 to remove dependency
    # on ConverterMapping, which is not implemented in some Python
    # distributions.
    pylint_newlines = []
    with python_utils.open_file(PYLINT_CONFIGPARSER_FILEPATH, 'r') as f:
        for line in f.readlines():
            if line.strip() == 'ConverterMapping,':
                continue
            if line.strip().endswith('"ConverterMapping",'):
                pylint_newlines.append(line[:line.find('"ConverterMapping"')] +
                                       '\n')
            else:
                pylint_newlines.append(line)
    with python_utils.open_file(PYLINT_CONFIGPARSER_FILEPATH, 'w+') as f:
        f.writelines(pylint_newlines)

    # Do similar surgery on configparser in pylint-quotes-0.1.8 to remove
    # dependency on ConverterMapping.
    pq_newlines = []
    with python_utils.open_file(PQ_CONFIGPARSER_FILEPATH, 'r') as f:
        for line in f.readlines():
            if line.strip() == 'ConverterMapping,':
                continue
            if line.strip() == '"ConverterMapping",':
                continue
            pq_newlines.append(line)
    with python_utils.open_file(PQ_CONFIGPARSER_FILEPATH, 'w+') as f:
        f.writelines(pq_newlines)

    # Download and install required JS and zip files.
    python_utils.PRINT('Installing third-party JS libraries and zip files.')
    install_third_party.main(args=[])

    if common.is_windows_os():
        tweak_yarn_executable()

    # Install third-party node modules needed for the build process.
    subprocess.check_call([get_yarn_command()])

    # Install pre-commit script.
    python_utils.PRINT('Installing pre-commit hook for git')
    pre_commit_hook.main(args=['--install'])

    # TODO(#8112): Once pre_commit_linter is working correctly, this
    # condition should be removed.
    if not common.is_windows_os():
        # Install pre-push script.
        python_utils.PRINT('Installing pre-push hook for git')
        pre_push_hook.main(args=['--install'])
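
# A hedged sketch of what a helper like ensure_pip_library_is_installed
# might do (assumption: it shells out to pip with --target; the real
# helper is defined elsewhere and is not shown in this snippet).
import os
import subprocess

def install_pip_library_to_dir(package, version, path):
    # pip's --target flag installs a package into an arbitrary directory
    # instead of the default site-packages.
    subprocess.check_call([
        'pip', 'install', '%s==%s' % (package, version),
        '--target', os.path.join(path, '%s-%s' % (package, version))])
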
def main():
    """Install third-party libraries for Oppia."""
    setup.main(args=[])
    setup_gae.main(args=[])
    # These system python libraries are REQUIRED to start the development server
    # and cannot be added to oppia_tools because the dev_appserver python script
    # looks for them in the default system paths when it is run. Therefore, we
    # must install these libraries to the developer's computer.
    system_pip_dependencies = [('enum34', common.ENUM_VERSION),
                               ('protobuf', common.PROTOBUF_VERSION)]
    local_pip_dependencies = [
        ('coverage', common.COVERAGE_VERSION, common.OPPIA_TOOLS_DIR),
        ('pylint', common.PYLINT_VERSION, common.OPPIA_TOOLS_DIR),
        ('Pillow', common.PILLOW_VERSION, common.OPPIA_TOOLS_DIR),
        ('pylint-quotes', common.PYLINT_QUOTES_VERSION,
         common.OPPIA_TOOLS_DIR),
        ('webtest', common.WEBTEST_VERSION, common.OPPIA_TOOLS_DIR),
        ('isort', common.ISORT_VERSION, common.OPPIA_TOOLS_DIR),
        ('pycodestyle', common.PYCODESTYLE_VERSION, common.OPPIA_TOOLS_DIR),
        ('esprima', common.ESPRIMA_VERSION, common.OPPIA_TOOLS_DIR),
        ('PyGithub', common.PYGITHUB_VERSION, common.OPPIA_TOOLS_DIR),
        ('protobuf', common.PROTOBUF_VERSION, common.OPPIA_TOOLS_DIR),
        ('psutil', common.PSUTIL_VERSION, common.OPPIA_TOOLS_DIR),
        ('pip-tools', common.PIP_TOOLS_VERSION, common.OPPIA_TOOLS_DIR),
        ('setuptools', common.SETUPTOOLS_VERSION, common.OPPIA_TOOLS_DIR),
    ]

    for package, version, path in local_pip_dependencies:
        ensure_pip_library_is_installed(package, version, path)

    for package, version in system_pip_dependencies:
        ensure_system_python_libraries_are_installed(package, version)
    # Do a little surgery on configparser in pylint-1.9.4 to remove dependency
    # on ConverterMapping, which is not implemented in some Python
    # distributions.
    pylint_newlines = []
    with python_utils.open_file(PYLINT_CONFIGPARSER_FILEPATH, 'r') as f:
        for line in f.readlines():
            if line.strip() == 'ConverterMapping,':
                continue
            if line.strip().endswith('"ConverterMapping",'):
                pylint_newlines.append(line[:line.find('"ConverterMapping"')] +
                                       '\n')
            else:
                pylint_newlines.append(line)
    with python_utils.open_file(PYLINT_CONFIGPARSER_FILEPATH, 'w+') as f:
        f.writelines(pylint_newlines)

    # Do similar surgery on configparser in pylint-quotes-0.1.8 to remove
    # dependency on ConverterMapping.
    pq_newlines = []
    with python_utils.open_file(PQ_CONFIGPARSER_FILEPATH, 'r') as f:
        for line in f.readlines():
            if line.strip() == 'ConverterMapping,':
                continue
            if line.strip() == '"ConverterMapping",':
                continue
            pq_newlines.append(line)
    with python_utils.open_file(PQ_CONFIGPARSER_FILEPATH, 'w+') as f:
        f.writelines(pq_newlines)

    # Download and install required JS and zip files.
    python_utils.PRINT('Installing third-party JS libraries and zip files.')
    install_third_party.main(args=[])

    # The following steps solve the problem of multiple google paths
    # confusing the python interpreter. Namely, there are two modules named
    # google/: one that is installed with the google cloud libraries and
    # another that comes with the Google Cloud SDK. Python cannot import
    # from both paths simultaneously, so we must combine the two modules
    # into one. We do this by copying the Google Cloud SDK libraries that we
    # need into the correct google module directory under
    # 'third_party/python_libs'.
    python_utils.PRINT(
        'Copying Google Cloud SDK modules to third_party/python_libs...')
    correct_google_path = os.path.join(common.THIRD_PARTY_PYTHON_LIBS_DIR,
                                       'google')
    if not os.path.isdir(correct_google_path):
        os.mkdir(correct_google_path)

    if not os.path.isdir(os.path.join(correct_google_path, 'appengine')):
        shutil.copytree(
            os.path.join(common.GOOGLE_APP_ENGINE_SDK_HOME, 'google',
                         'appengine'),
            os.path.join(correct_google_path, 'appengine'))

    if not os.path.isdir(os.path.join(correct_google_path, 'net')):
        shutil.copytree(
            os.path.join(common.GOOGLE_APP_ENGINE_SDK_HOME, 'google', 'net'),
            os.path.join(correct_google_path, 'net'))

    if not os.path.isdir(os.path.join(correct_google_path, 'pyglib')):
        shutil.copytree(
            os.path.join(common.GOOGLE_APP_ENGINE_SDK_HOME,
                         'google', 'pyglib'),
            os.path.join(correct_google_path, 'pyglib'))

    # The following for loop populates all of the google modules with the
    # correct __init__.py files if they do not exist. This works around the
    # bug linked below, where namespace packages sometimes install modules
    # without __init__.py files (python requires modules to have __init__.py
    # files in order to recognize them as modules and import them):
    # https://github.com/googleapis/python-ndb/issues/518
    python_utils.PRINT(
        'Checking that all google library modules contain __init__.py files...'
    )
    for path_list in os.walk(correct_google_path):
        root_path = path_list[0]
        if not root_path.endswith('__pycache__'):
            with python_utils.open_file(os.path.join(root_path, '__init__.py'),
                                        'a'):
                # If the file doesn't exist, it is created. If it does exist,
                # this open does nothing.
                pass

    # Compile protobuf files.
    python_utils.PRINT('Installing buf and protoc binary.')
    install_buf_and_protoc()
    python_utils.PRINT('Compiling protobuf files.')
    compile_protobuf_files(PROTO_FILES_PATHS)

    if common.is_windows_os():
        tweak_yarn_executable()

    # Install third-party node modules needed for the build process.
    subprocess.check_call([get_yarn_command(), 'install', '--pure-lockfile'])

    # Install pre-commit script.
    python_utils.PRINT('Installing pre-commit hook for git')
    pre_commit_hook.main(args=['--install'])

    # TODO(#8112): Once pre_commit_linter is working correctly, this
    # condition should be removed.
    if not common.is_windows_os():
        # Install pre-push script.
        python_utils.PRINT('Installing pre-push hook for git')
        pre_push_hook.main(args=['--install'])
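
# A standalone sketch of the create-if-missing trick used in the loop
# above: opening a file in append mode creates it when absent and leaves
# existing contents untouched.
import os
import tempfile

demo_dir = tempfile.mkdtemp()
demo_init_path = os.path.join(demo_dir, '__init__.py')
with open(demo_init_path, 'a'):
    pass
assert os.path.exists(demo_init_path)
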
Example #5
def run_tests(args=None):
    """Run the scripts to start end-to-end tests."""

    parsed_args = _PARSER.parse_args(args=args)
    oppia_instance_is_already_running = is_oppia_server_already_running()

    if oppia_instance_is_already_running:
        sys.exit(1)
    setup_and_install_dependencies(parsed_args.skip_install)

    common.start_redis_server()
    atexit.register(cleanup)

    dev_mode = not parsed_args.prod_env

    if parsed_args.skip_build:
        build.modify_constants(prod_env=parsed_args.prod_env)
    else:
        build_js_files(dev_mode,
                       deparallelize_terser=parsed_args.deparallelize_terser,
                       source_maps=parsed_args.source_maps)
    version = parsed_args.chrome_driver_version or get_chrome_driver_version()
    python_utils.PRINT('\n\nCHROMEDRIVER VERSION: %s\n\n' % version)
    start_webdriver_manager(version)

    portserver_process = start_portserver()
    atexit.register(cleanup_portserver, portserver_process)
    start_google_app_engine_server(dev_mode, parsed_args.server_log_level)

    common.wait_for_port_to_be_open(WEB_DRIVER_PORT)
    common.wait_for_port_to_be_open(GOOGLE_APP_ENGINE_PORT)
    ensure_screenshots_dir_is_removed()
    commands = [common.NODE_BIN_PATH]
    if parsed_args.debug_mode:
        commands.append('--inspect-brk')
    # This flag ensures tests fail if waitFor calls time out.
    commands.append('--unhandled-rejections=strict')
    commands.append(PROTRACTOR_BIN_PATH)
    commands.extend(
        get_e2e_test_parameters(parsed_args.sharding_instances,
                                parsed_args.suite, dev_mode))

    p = subprocess.Popen(commands, stdout=subprocess.PIPE)
    output_lines = []
    while True:
        nextline = p.stdout.readline()
        if len(nextline) == 0 and p.poll() is not None:
            break
        sys.stdout.write(nextline)
        sys.stdout.flush()
        output_lines.append(nextline.strip())

    flaky_tests_list = []
    google_auth_decode_password = os.getenv('GOOGLE_AUTH_DECODE_PASSWORD')
    if google_auth_decode_password is not None:
        with python_utils.open_file('auth.json.enc', 'rb',
                                    encoding=None) as enc_file:
            with python_utils.open_file('auth.json', 'w') as dec_file:
                ciphertext = enc_file.read()
                plaintext = simplecrypt.decrypt(google_auth_decode_password,
                                                ciphertext).decode('utf-8')
                dec_file.write(plaintext)

        sheets_scopes = ['https://www.googleapis.com/auth/spreadsheets']
        creds = service_account.Credentials.from_service_account_file(
            'auth.json', scopes=sheets_scopes)
        sheet = googleapiclient.discovery.build(
            'sheets', 'v4', credentials=creds).spreadsheets()
        flaky_tests_list = get_flaky_tests_data_from_sheets(sheet)

    suite_name = parsed_args.suite.lower()
    if len(flaky_tests_list) > 0 and p.returncode != 0:
        for i, line in enumerate(output_lines):
            if line == '*                    Failures                    *':
                test_name = output_lines[i + 3][3:].strip().lower()

                # Remove coloring characters.
                ansi_escape = re.compile(
                    r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
                failure_log = ansi_escape.sub('', output_lines[i + 4])
                failure_log = failure_log.strip().lower()
                for index, row in enumerate(flaky_tests_list):
                    flaky_suite_name = row[0].strip().lower()
                    flaky_test_message = row[1].strip().lower()
                    flaky_error_message = row[2].strip().lower()
                    if (suite_name == flaky_suite_name
                            or flaky_suite_name == '[general]'):
                        if (test_name == flaky_test_message
                                or flaky_test_message == 'many'):
                            if flaky_error_message in failure_log:
                                update_flaky_tests_count(sheet, index, row[3])
                                try:
                                    cleanup_portserver(portserver_process)
                                    cleanup()
                                except Exception:  # pragma: no cover
                                    # This is marked as no cover because the
                                    # exception happens due to some processes
                                    # running on the local system, which might
                                    # interfere with the cleanup stuff. This is
                                    # added as a failsafe to make sure that
                                    # even when it throws an exception, the
                                    # test is retried.
                                    pass  # pragma: no cover
                                return 'flake'
    sys.exit(p.returncode)
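
# A standalone sketch of the ANSI-escape stripping used above, applied to
# a made-up colored failure line.
import re

ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
colored = '\x1b[31mFAILED: users can log in\x1b[0m'
assert ansi_escape.sub('', colored) == 'FAILED: users can log in'
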
def main():
    """Collects necessary info and dumps it to disk."""
    branch_name = common.get_current_branch_name()
    if not common.is_current_branch_a_release_branch():
        raise Exception(
            'This script should only be run from the latest release branch.')

    parsed_args = _PARSER.parse_args()
    if parsed_args.github_username is None:
        raise Exception('No GitHub username provided. Please re-run the '
                        'script specifying a username using '
                        '--github_username=<Your username>')
    github_username = parsed_args.github_username

    personal_access_token = common.get_personal_access_token()

    g = github.Github(personal_access_token)
    repo = g.get_organization('oppia').get_repo('oppia')
    repo_fork = g.get_repo('%s/oppia' % github_username)

    common.check_blocking_bug_issue_count(repo)
    common.check_prs_for_current_release_are_released(repo)

    python_utils.PRINT('Generating release summary...')
    generate_release_info.main(personal_access_token)

    if not os.path.exists(release_constants.RELEASE_SUMMARY_FILEPATH):
        raise Exception('Release summary file %s is missing. Please re-run '
                        'this script.' %
                        release_constants.RELEASE_SUMMARY_FILEPATH)

    current_release_version_number = common.get_current_release_version_number(
        branch_name)
    target_branch = 'update-changelog-for-releasev%s' % (
        current_release_version_number)

    remove_updates_and_delete_branch(repo_fork, target_branch)

    # Opens Credit Form.
    python_utils.PRINT(
        'Note: Make following changes directly to %s and make sure to '
        'save the file after making these changes.' %
        (release_constants.RELEASE_SUMMARY_FILEPATH))

    common.ask_user_to_confirm(
        'Check emails and names for new authors and new contributors in the '
        'file: %s and verify that the emails are '
        'correct through welcome emails sent from [email protected] '
        '(confirm with Sean in case of doubt).' %
        (release_constants.RELEASE_SUMMARY_FILEPATH))
    common.open_new_tab_in_browser_if_possible(
        release_constants.CREDITS_FORM_URL)
    common.ask_user_to_confirm(
        'Check the credits form and add any additional contributors '
        'to the contributor list in the file: %s.' %
        (release_constants.RELEASE_SUMMARY_FILEPATH))
    common.ask_user_to_confirm(
        'Categorize the PR titles in the Uncategorized section of the '
        'changelog in the file: %s, and arrange the changelog '
        'to have user-facing categories on top.' %
        (release_constants.RELEASE_SUMMARY_FILEPATH))
    common.ask_user_to_confirm(
        'Verify each item is in the correct section in the '
        'file: %s and remove trivial changes like "Fix lint errors" '
        'from the changelog.' % (release_constants.RELEASE_SUMMARY_FILEPATH))
    common.ask_user_to_confirm(
        'Ensure that all items in changelog in the file: %s '
        'start with a verb in simple present tense.' %
        (release_constants.RELEASE_SUMMARY_FILEPATH))
    common.ask_user_to_confirm(
        'Please save the file: %s with all the changes that '
        'you have made.' % (release_constants.RELEASE_SUMMARY_FILEPATH))

    release_summary_lines = []
    with python_utils.open_file(release_constants.RELEASE_SUMMARY_FILEPATH,
                                'r') as release_summary_file:
        release_summary_lines = release_summary_file.readlines()

    check_ordering_of_sections(release_summary_lines)

    update_changelog(branch_name, release_summary_lines,
                     current_release_version_number)
    update_authors(release_summary_lines)
    update_contributors(release_summary_lines)
    update_developer_names(release_summary_lines)

    message = ('Please check the changes and make updates if required in the '
               'following files:\n1. %s\n2. %s\n3. %s\n4. %s\n' %
               (CHANGELOG_FILEPATH, AUTHORS_FILEPATH, CONTRIBUTORS_FILEPATH,
                ABOUT_PAGE_FILEPATH))
    common.ask_user_to_confirm(message)

    create_branch(repo_fork, target_branch, github_username,
                  current_release_version_number)
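
# An illustrative sketch (assumed behavior, mirroring the slicing used in
# the older main() above) of how the release version is derived from a
# release branch name:
RELEASE_BRANCH_NAME_PREFIX = 'release-'
branch_name = 'release-3.1.4'
current_release_version_number = branch_name[len(RELEASE_BRANCH_NAME_PREFIX):]
assert current_release_version_number == '3.1.4'
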
Example #7
def execute_deployment():
    """Executes the deployment process after doing the prerequisite checks.

    Raises:
        Exception. App name is invalid.
        Exception. Custom version is used with production app.
        Exception. App name is not specified.
        Exception. The deployment script is not run from a release or test
            branch.
        Exception. The deployment script is run for prod server from a test
            branch.
        Exception. Current release version has '.' character.
        Exception. Last commit message is invalid.
        Exception. The mailgun API key is not added before deployment.
        Exception. Could not find third party directory.
        Exception. Invalid directory accessed during deployment.
    """
    parsed_args = _PARSER.parse_args()
    custom_version = None
    if parsed_args.app_name:
        app_name = parsed_args.app_name
        if app_name not in [APP_NAME_OPPIASERVER, APP_NAME_OPPIATESTSERVER
                            ] and ('migration' not in app_name):
            raise Exception('Invalid app name: %s' % app_name)
        if parsed_args.version and app_name == APP_NAME_OPPIASERVER:
            raise Exception('Cannot use custom version with production app.')
        # Note that custom_version may be None.
        custom_version = parsed_args.version
    else:
        raise Exception('No app name specified.')

    current_branch_name = common.get_current_branch_name()

    release_dir_name = 'deploy-%s-%s-%s' % (
        '-'.join('-'.join(app_name.split('.')).split(':')),
        current_branch_name, CURRENT_DATETIME.strftime('%Y%m%d-%H%M%S'))
    release_dir_path = os.path.join(os.getcwd(), '..', release_dir_name)

    deploy_data_path = os.path.join(os.getcwd(), os.pardir, 'release-scripts',
                                    'deploy_data', app_name)

    install_third_party_libs.main()

    if not (common.is_current_branch_a_release_branch() or
            (common.is_current_branch_a_test_branch())):
        raise Exception(
            'The deployment script must be run from a release or test branch.')
    if common.is_current_branch_a_test_branch() and (app_name in [
            APP_NAME_OPPIASERVER, APP_NAME_OPPIATESTSERVER
    ]):
        raise Exception('Test branch can only be deployed to backup server.')
    if custom_version is not None:
        current_release_version = custom_version.replace(DOT_CHAR, HYPHEN_CHAR)
    else:
        current_release_version = current_branch_name[
            len(common.RELEASE_BRANCH_NAME_PREFIX):].replace(
                DOT_CHAR, HYPHEN_CHAR)

    # This is required to compose the release_version_library_url
    # (defined in switch_version function) correctly.
    if '.' in current_release_version:
        raise Exception('Current release version has \'.\' character.')

    assert len(current_release_version) <= 25, (
        'The length of the "version" arg should be less than or '
        'equal to 25 characters.')

    # Do prerequisite checks.
    common.require_cwd_to_be_oppia()
    common.ensure_release_scripts_folder_exists_and_is_up_to_date()
    gcloud_adapter.require_gcloud_to_be_available()
    try:
        if app_name == APP_NAME_OPPIASERVER:
            check_release_doc()
            release_version_number = common.get_current_release_version_number(
                current_branch_name)
            last_commit_message = subprocess.check_output(
                'git log -1 --pretty=%B'.split())
            personal_access_token = common.get_personal_access_token()
            if not common.is_current_branch_a_hotfix_branch():
                if not last_commit_message.startswith(
                        'Update authors and changelog for v%s' %
                    (release_version_number)):
                    raise Exception('Invalid last commit message: %s.' %
                                    (last_commit_message))
                g = github.Github(personal_access_token)
                repo = g.get_organization('oppia').get_repo('oppia')
                common.check_blocking_bug_issue_count(repo)
                common.check_prs_for_current_release_are_released(repo)

            check_travis_and_circleci_tests(current_branch_name)
            update_configs.main(personal_access_token)
            with python_utils.open_file(common.FECONF_PATH, 'r') as f:
                feconf_contents = f.read()
                if ('MAILGUN_API_KEY' not in feconf_contents
                        or 'MAILGUN_API_KEY = None' in feconf_contents):
                    raise Exception(
                        'The mailgun API key must be added before deployment.')
        if not os.path.exists(THIRD_PARTY_DIR):
            raise Exception(
                'Could not find third_party directory at %s. Please run '
                'install_third_party_libs.py prior to running this script.' %
                THIRD_PARTY_DIR)

        current_git_revision = subprocess.check_output(
            ['git', 'rev-parse', 'HEAD']).strip()

        # Create a folder in which to save the release candidate.
        python_utils.PRINT('Ensuring that the release directory parent exists')
        common.ensure_directory_exists(os.path.dirname(release_dir_path))

        # Copy files to the release directory. Omits the .git subfolder.
        python_utils.PRINT('Copying files to the release directory')
        shutil.copytree(os.getcwd(),
                        release_dir_path,
                        ignore=shutil.ignore_patterns('.git'))

        # Change the current directory to the release candidate folder.
        with common.CD(release_dir_path):
            if not os.getcwd().endswith(release_dir_name):
                raise Exception(
                    'Invalid directory accessed during deployment: %s' %
                    os.getcwd())

            python_utils.PRINT('Changing directory to %s' % os.getcwd())

            python_utils.PRINT('Preprocessing release...')
            preprocess_release(app_name, deploy_data_path)

            update_and_check_indexes(app_name)
            build_scripts(parsed_args.maintenance_mode)
            deploy_application_and_write_log_entry(app_name,
                                                   current_release_version,
                                                   current_git_revision)

            python_utils.PRINT('Returning to oppia/ root directory.')

        switch_version(app_name, current_release_version)
        flush_memcache(app_name)
        check_breakage(app_name, current_release_version)

        python_utils.PRINT('Done!')
    finally:
        common.run_cmd([
            'git', 'checkout', '--', update_configs.LOCAL_FECONF_PATH,
            update_configs.LOCAL_CONSTANTS_PATH
        ])
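
# A standalone sketch of the version normalization above: the script
# rejects version ids containing '.' (see the check in execute_deployment),
# so dots are replaced with hyphens first. DOT_CHAR and HYPHEN_CHAR are
# assumed to be '.' and '-'.
DOT_CHAR = '.'
HYPHEN_CHAR = '-'
custom_version = '3.1.4'
assert custom_version.replace(DOT_CHAR, HYPHEN_CHAR) == '3-1-4'
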
Example #8
def main(args=None):
    """Install third-party libraries for Oppia."""
    parsed_args = _PARSER.parse_args(args=args)

    setup.main(args=[])
    setup_gae.main(args=[])
    pip_dependencies = [
        ('pylint', '1.9.4', common.OPPIA_TOOLS_DIR),
        ('Pillow', '6.0.0', common.OPPIA_TOOLS_DIR),
        ('pylint-quotes', '0.1.8', common.OPPIA_TOOLS_DIR),
        ('webtest', '2.0.33', common.OPPIA_TOOLS_DIR),
        ('isort', '4.3.20', common.OPPIA_TOOLS_DIR),
        ('pycodestyle', '2.5.0', common.OPPIA_TOOLS_DIR),
        ('esprima', '4.0.1', common.OPPIA_TOOLS_DIR),
        ('browsermob-proxy', '0.8.0', common.OPPIA_TOOLS_DIR),
        ('selenium', '3.13.0', common.OPPIA_TOOLS_DIR),
        ('PyGithub', '1.43.7', common.OPPIA_TOOLS_DIR),
    ]

    for package, version, path in pip_dependencies:
        ensure_pip_library_is_installed(package, version, path)

    # Do a little surgery on configparser in pylint-1.9.4 to remove dependency
    # on ConverterMapping, which is not implemented in some Python
    # distributions.
    pylint_configparser_filepath = os.path.join(
        common.OPPIA_TOOLS_DIR, 'pylint-1.9.4', 'configparser.py')
    pylint_newlines = []
    with python_utils.open_file(pylint_configparser_filepath, 'r') as f:
        for line in f.readlines():
            if line.strip() == 'ConverterMapping,':
                continue
            if line.strip().endswith('"ConverterMapping",'):
                pylint_newlines.append(
                    line[:line.find('"ConverterMapping"')] + '\n')
            else:
                pylint_newlines.append(line)
    with python_utils.open_file(pylint_configparser_filepath, 'w+') as f:
        f.writelines(pylint_newlines)

    # Do similar surgery on configparser in pylint-quotes-0.1.8 to remove
    # dependency on ConverterMapping.
    pq_configparser_filepath = os.path.join(
        common.OPPIA_TOOLS_DIR, 'pylint-quotes-0.1.8', 'configparser.py')
    pq_newlines = []
    with python_utils.open_file(pq_configparser_filepath, 'r') as f:
        for line in f.readlines():
            if line.strip() == 'ConverterMapping,':
                continue
            if line.strip() == '"ConverterMapping",':
                continue
            pq_newlines.append(line)
    with python_utils.open_file(pq_configparser_filepath, 'w+') as f:
        f.writelines(pq_newlines)

    # Download and install required JS and zip files.
    python_utils.PRINT('Installing third-party JS libraries and zip files.')
    install_third_party.main(args=[])

    # Install third-party node modules needed for the build process.
    subprocess.call(['yarn'])

    install_skulpt(parsed_args)

    # Install pre-commit script.
    python_utils.PRINT('Installing pre-commit hook for git')
    pre_commit_hook.main(args=['--install'])

    # Install pre-push script.
    python_utils.PRINT('Installing pre-push hook for git')
    pre_push_hook.main(args=['--install'])
def mock_read_travis_yml_file():
    # Use a `with` block so the file handle is closed after reading, then
    # parse the YAML into a dict.
    with python_utils.open_file(
        os.path.join(DUMMY_CONF_FILES, '.dummy_travis.yml'), 'r') as f:
        travis_ci_dict = utils.dict_from_yaml(f.read())
    return travis_ci_dict
Example #10
def remove_comments(text):
    # type: (Text) -> Text
    """Removes comments from given text.

    Args:
        text: str. The text from which comments should be removed.

    Returns:
        str. Text with all its comments removed.
    """
    return re.sub(r'  //.*\n', r'', text)
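
# A quick usage sketch (hypothetical snippet): only '//' comments preceded
# by two spaces are stripped, and the trailing newline is consumed with
# them.
sample = 'var a = 1;  // default value\nvar b = 2;\n'
assert remove_comments(sample) == 'var a = 1;var b = 2;\n'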


class Constants(dict):  # type: ignore[type-arg]
    """Transforms a dict into an object whose attributes can be accessed
    by dot notation.
    """

    def __setattr__(self, name, value):
        # type: (Text, Any) -> None
        self[name] = value

    def __getattr__(self, name):
        # type: (Text) -> Any
        return self[name]


with python_utils.open_file(os.path.join('assets', 'constants.ts'), 'r') as f: # type: ignore[no-untyped-call]
    constants = Constants(parse_json_from_js(f))  # pylint:disable=invalid-name

with python_utils.open_file('release_constants.json', 'r') as f: # type: ignore[no-untyped-call]
    release_constants = Constants(json.loads(f.read()))  # pylint:disable=invalid-name
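
# A minimal usage sketch of the Constants wrapper above (hypothetical
# values): dict keys become attributes and vice versa.
demo = Constants({'DEV_MODE': True})
assert demo.DEV_MODE is True
demo.SITE_NAME = 'Oppia'
assert demo['SITE_NAME'] == 'Oppia'
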
Example #11
    def test_image_upload_and_download(self):
        """Test image uploading and downloading."""
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
        self.set_admins([self.ADMIN_USERNAME])

        subtopic = topic_domain.Subtopic.create_default_subtopic(
            1, 'Subtopic Title')
        story_id = story_services.get_new_story_id()
        topic_id = topic_services.get_new_topic_id()
        skill_id = skill_services.get_new_skill_id()
        self.save_new_story(story_id, admin_id, topic_id)
        self.save_new_topic(
            topic_id, admin_id, name='Name',
            description='Description', canonical_story_ids=[story_id],
            additional_story_ids=[], uncategorized_skill_ids=[],
            subtopics=[subtopic], next_subtopic_id=2)
        self.save_new_skill(skill_id, admin_id, description='Description')

        # Page context: Exploration.
        self.login(self.EDITOR_EMAIL)
        csrf_token = self.get_new_csrf_token()

        with python_utils.open_file(
            os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
            'rb', encoding=None) as f:
            raw_image = f.read()
        response_dict = self.post_json(
            '%s/exploration/0' % self.IMAGE_UPLOAD_URL_PREFIX,
            {'filename': 'test.png'},
            csrf_token=csrf_token,
            upload_files=(('image', 'unused_filename', raw_image),)
        )
        filename = response_dict['filename']

        self.logout()

        response = self.get_custom_response(
            self._get_image_url('exploration', '0', filename), 'image/png')
        self.assertEqual(response.body, raw_image)

        # Page context: Topic.
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()

        with python_utils.open_file(
            os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb',
            encoding=None) as f:
            raw_image = f.read()
        response_dict = self.post_json(
            '%s/topic/%s' % (self.IMAGE_UPLOAD_URL_PREFIX, topic_id),
            {'filename': 'test.png'},
            csrf_token=csrf_token,
            upload_files=(('image', 'unused_filename', raw_image),)
        )
        filename = response_dict['filename']

        self.logout()

        response = self.get_custom_response(
            self._get_image_url('topic', topic_id, filename), 'image/png')
        self.assertEqual(response.body, raw_image)

        # Page context: Story.
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()

        with python_utils.open_file(
            os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb',
            encoding=None) as f:
            raw_image = f.read()
        response_dict = self.post_json(
            '%s/story/%s' % (self.IMAGE_UPLOAD_URL_PREFIX, story_id),
            {'filename': 'test.png'},
            csrf_token=csrf_token,
            upload_files=(('image', 'unused_filename', raw_image),)
        )
        filename = response_dict['filename']

        self.logout()

        response = self.get_custom_response(
            self._get_image_url('story', story_id, filename), 'image/png')
        self.assertEqual(response.body, raw_image)

        # Page context: Skill.
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()

        with python_utils.open_file(
            os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb',
            encoding=None) as f:
            raw_image = f.read()
        response_dict = self.post_json(
            '%s/skill/%s' % (self.IMAGE_UPLOAD_URL_PREFIX, skill_id),
            {'filename': 'test.png'},
            csrf_token=csrf_token,
            upload_files=(('image', 'unused_filename', raw_image),)
        )
        filename = response_dict['filename']

        self.logout()

        response = self.get_custom_response(
            self._get_image_url('skill', skill_id, filename), 'image/png')
        self.assertEqual(response.body, raw_image)
Example #12
# esprima is installed under oppia_tools (see the pip dependency lists
# above), versioned by directory name.
ESPRIMA_PATH = os.path.join(common.OPPIA_TOOLS_DIR,
                            'esprima-%s' % common.ESPRIMA_VERSION)

sys.path.insert(1, ESPRIMA_PATH)

# pylint: disable=wrong-import-order
# pylint: disable=wrong-import-position
import esprima  # isort:skip
from .. import build  # isort:skip
# pylint: enable=wrong-import-order
# pylint: enable=wrong-import-position

FILES_EXCLUDED_FROM_ANY_TYPE_CHECK_PATH = os.path.join(
    CURR_DIR, 'scripts', 'linters', 'excluded_any_type_files.json')

FILES_EXCLUDED_FROM_ANY_TYPE_CHECK = json.load(
    python_utils.open_file(FILES_EXCLUDED_FROM_ANY_TYPE_CHECK_PATH, 'r'))


def _get_expression_from_node_if_one_exists(parsed_node, components_to_check):
    """This function first checks whether the parsed node represents
    the required angular component that needs to be derived by checking if
    it is in the 'components_to_check' list. If yes, then it will return the
    expression part of the node from which the component can be derived.
    If no, it will return None. This is done by filtering out
    'AssignmentExpression' (as it represents an assignment) and 'Identifier'
    (as it represents a static expression).

    Args:
        parsed_node: dict. Parsed node of the body of a JS file.
        components_to_check: list(str). List of angular components to check
            in a JS file. These include directives, factories, controllers,
Example #13
def get_file_contents(filepath, mode='r'):
    """Gets the contents of a file, given a relative filepath from oppia/."""
    with python_utils.open_file(filepath, mode) as f:
        return f.read()


    def test_release_summary_content(self):
        def mock_get_current_version_tag(unused_repo):
            return github.Tag.Tag(requester='',
                                  headers='',
                                  attributes={'commit': {
                                      'sha': 'sha'
                                  }},
                                  completed='')

        def mock_get_extra_commits_in_new_release(unused_base_commit,
                                                  unused_repo):
            return [
                github.Commit.Commit(requester='',
                                     headers='',
                                     attributes={'sha': 'sha1'},
                                     completed=''),
                github.Commit.Commit(requester='',
                                     headers='',
                                     attributes={'sha': 'sha2'},
                                     completed=''),
                github.Commit.Commit(requester='',
                                     headers='',
                                     attributes={'sha': 'sha3'},
                                     completed='')
            ]

        def mock_gather_logs(unused_start, stop='HEAD'):
            new_log1 = generate_release_info.Log('sha1', 'author1', 'email1',
                                                 'message1')
            new_log2 = generate_release_info.Log('sha2', 'author2', 'email2',
                                                 'message2')
            old_log = generate_release_info.Log('sha3', 'author3', 'email3',
                                                'message3')
            cherrypick_log = generate_release_info.Log('sha4', 'author4',
                                                       'email4', 'message4')
            if stop == 'HEAD':
                return [new_log1, new_log2, old_log, cherrypick_log]
            else:
                return [old_log]

        def mock_extract_issues(unused_logs):
            return {'issues'}

        def mock_check_versions(unused_current_release):
            return ['version_change']

        def mock_check_setup_scripts(unused_base_release_tag):
            return {'setup_changes': True}

        def mock_check_storage_models(unused_current_release):
            return ['storage_changes']

        def mock_extract_pr_numbers(unused_logs):
            return []

        def mock_get_prs_from_pr_numbers(unused_pr_numbers, unused_repo):
            return []

        def mock_get_changelog_categories(unused_pulls):
            return {'category': ['pr1', 'pr2']}

        version_tag_swap = self.swap(generate_release_info,
                                     'get_current_version_tag',
                                     mock_get_current_version_tag)
        extra_commits_swap = self.swap(generate_release_info,
                                       'get_extra_commits_in_new_release',
                                       mock_get_extra_commits_in_new_release)
        gather_logs_swap = self.swap(generate_release_info, 'gather_logs',
                                     mock_gather_logs)
        extract_issues_swap = self.swap(generate_release_info,
                                        'extract_issues', mock_extract_issues)
        check_versions_swap = self.swap(generate_release_info,
                                        'check_versions', mock_check_versions)
        setup_scripts_swap = self.swap(generate_release_info,
                                       'check_setup_scripts',
                                       mock_check_setup_scripts)
        storage_models_swap = self.swap(generate_release_info,
                                        'check_storage_models',
                                        mock_check_storage_models)
        extract_prs_swap = self.swap(generate_release_info,
                                     'extract_pr_numbers',
                                     mock_extract_pr_numbers)
        get_prs_swap = self.swap(generate_release_info,
                                 'get_prs_from_pr_numbers',
                                 mock_get_prs_from_pr_numbers)
        get_changelog_swap = self.swap(generate_release_info,
                                       'get_changelog_categories',
                                       mock_get_changelog_categories)

        tmp_file = tempfile.NamedTemporaryFile()
        release_summary_swap = self.swap(release_constants,
                                         'RELEASE_SUMMARY_FILEPATH',
                                         tmp_file.name)

        with self.branch_name_swap, self.open_browser_swap:
            with self.get_organization_swap, self.get_repo_swap:
                with self.getpass_swap, version_tag_swap:
                    with extra_commits_swap, get_prs_swap:
                        with gather_logs_swap, extract_issues_swap:
                            with check_versions_swap, setup_scripts_swap:
                                with storage_models_swap, release_summary_swap:
                                    with get_changelog_swap, extract_prs_swap:
                                        generate_release_info.main(
                                            'test-token')
        with python_utils.open_file(GENERATED_RELEASE_SUMMARY_FILEPATH,
                                    'r') as f:
            expected_lines = f.readlines()
        with python_utils.open_file(tmp_file.name, 'r') as f:
            actual_lines = f.readlines()
        update_changelog_and_credits.check_ordering_of_sections(actual_lines)
        self.assertEqual(actual_lines, expected_lines)
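
# A hedged aside: the deeply nested `with` pyramid above can be flattened
# using contextlib.ExitStack, assuming each swap object is an ordinary
# context manager. A sketch:
import contextlib

def run_with_swaps(swaps, func):
    # Enter every swap, call func, then exit the swaps in reverse order.
    with contextlib.ExitStack() as stack:
        for swap in swaps:
            stack.enter_context(swap)
        return func()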
Example #15
class BaseRteComponent(python_utils.OBJECT):
    """Base Rte Component class.

    This is the superclass for rich text components in Oppia, such as
    Image and Video.
    """

    with python_utils.open_file(
        feconf.RTE_EXTENSIONS_DEFINITIONS_PATH, 'r') as f:
        rich_text_component_specs = constants.parse_json_from_js(f)

    obj_types_to_obj_classes = {
        'unicode': objects.UnicodeString,
        'html': objects.Html,
        'Filepath': objects.Filepath,
        'SanitizedUrl': objects.SanitizedUrl,
        'MathExpressionContent': objects.MathExpressionContent,
        'ListOfTabs': objects.ListOfTabs,
        'SvgFilename': objects.SvgFilename,
        'int': objects.Int,
        'bool': objects.Boolean,
        'SkillSelector': objects.SkillSelector
    }

    @classmethod
    def validate(cls, value_dict):
        """Validates customization args for a rich text component.

        Raises:
            TypeError. If any customization arg is invalid.
        """
        arg_names_to_obj_classes = {}
        customization_arg_specs = cls.rich_text_component_specs[
            cls.__name__]['customization_arg_specs']
        for customization_arg_spec in customization_arg_specs:
            arg_name = '%s-with-value' % customization_arg_spec['name']
            schema = customization_arg_spec['schema']
            if schema['type'] != 'custom':
                obj_type = schema['type']
            else:
                obj_type = schema['obj_type']
            obj_class = cls.obj_types_to_obj_classes[obj_type]
            arg_names_to_obj_classes[arg_name] = obj_class

        required_attr_names = list(arg_names_to_obj_classes.keys())
        attr_names = list(value_dict.keys())

        if set(attr_names) != set(required_attr_names):
            missing_attr_names = list(
                set(required_attr_names) - set(attr_names))
            extra_attr_names = list(set(attr_names) - set(required_attr_names))
            raise utils.ValidationError(
                'Missing attributes: %s, Extra attributes: %s' % (
                    ', '.join(missing_attr_names),
                    ', '.join(extra_attr_names)
                )
            )

        for arg_name in required_attr_names:
            arg_obj_class = arg_names_to_obj_classes[arg_name]
            arg_obj_class.normalize(value_dict[arg_name])
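
# A standalone sketch (made-up arg names) of the attribute check in
# validate() above: set differences yield the missing and the extra
# customization args.
required_attr_names = ['text-with-value', 'url-with-value']
attr_names = ['text-with-value', 'alt-with-value']
missing_attr_names = list(set(required_attr_names) - set(attr_names))
extra_attr_names = list(set(attr_names) - set(required_attr_names))
assert missing_attr_names == ['url-with-value']
assert extra_attr_names == ['alt-with-value']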
Example #16
def main():
    """Collects necessary info and dumps it to disk."""
    branch_name = _get_current_branch()
    if not re.match(r'release-\d+\.\d+\.\d+$', branch_name):
        raise Exception(
            'This script should only be run from the latest release branch.')

    parsed_args = _PARSER.parse_args()
    if parsed_args.personal_access_token is None:
        python_utils.PRINT(
            'No personal access token provided, please set up a personal '
            'access token at https://github.com/settings/tokens and pass it '
            'to the script using --personal_access_token=<token>')
        return

    personal_access_token = parsed_args.personal_access_token
    g = github.Github(personal_access_token)
    repo = g.get_organization('oppia').get_repo('oppia')

    current_release = _get_current_version_tag(repo)
    current_release_tag = current_release.name
    base_commit = current_release.commit.sha
    new_commits = get_extra_commits_in_new_release(base_commit, repo)
    new_release_logs = _gather_logs(base_commit)

    # Keep only logs whose commits appear in the new release; logs whose
    # sha is absent from the new commits were cherry-picked from elsewhere.
    # (Building a new list avoids deleting from the list while iterating,
    # which would skip entries.)
    new_release_logs = [
        log for log in new_release_logs
        if any(log.sha1 == commit.sha for commit in new_commits)]

    past_logs = _gather_logs(FIRST_OPPIA_COMMIT, stop=base_commit)
    issue_links = _extract_issues(new_release_logs)
    feconf_version_changes = _check_versions(current_release_tag)
    setup_changes = _check_setup_scripts(current_release_tag)
    storage_changes = _check_storage_models(current_release_tag)

    pr_numbers = _extract_pr_numbers(new_release_logs)
    prs = get_prs_from_pr_numbers(pr_numbers, repo)
    categorized_pr_titles = get_changelog_categories(prs)

    summary_file = os.path.join(os.getcwd(), os.pardir, 'release_summary.md')
    with python_utils.open_file(summary_file, 'w') as out:
        out.write('## Collected release information\n')

        if feconf_version_changes:
            out.write('\n### Feconf version changes:\nThis indicates that a '
                      'migration may be needed\n\n')
            for var in feconf_version_changes:
                out.write('* %s  \n' % var)

        if setup_changes:
            out.write('\n### Changed setup scripts:\n')
            for var in setup_changes.keys():
                out.write('* %s  \n' % var)

        if storage_changes:
            out.write('\n### Changed storage models:\n')
            for item in storage_changes:
                out.write('* %s  \n' % item)

        past_authors = {log.email: log.author for log in past_logs}
        release_authors = {(log.author, log.email) for log in new_release_logs}

        new_authors = sorted(
            set([(name, email) for name, email in release_authors
                 if email not in past_authors]))
        existing_authors = sorted(
            set([(name, email) for name, email in release_authors
                 if email in past_authors]))
        new_author_names = [name for name, _ in new_authors]
        existing_author_names = [name for name, _ in existing_authors]

        # TODO(apb7): duplicate author handling due to email changes.
        out.write('\n### New Authors:\n')
        for name, email in new_authors:
            out.write('* %s <%s>\n' % (name, email))

        out.write('\n### Existing Authors:\n')
        for name, email in existing_authors:
            out.write('* %s <%s>\n' % (name, email))

        # Generate the author sections of the email. (Note: these blurbs
        # assume at least one new and one existing author; the [-1]
        # indexing below fails on an empty list.)
        out.write('\n### Email C&P Blurbs about authors:\n')
        new_author_comma_list = (
            '%s, and %s' %
            (', '.join(new_author_names[:-1]), new_author_names[-1]))
        existing_author_comma_list = (
            '%s, and %s' %
            (', '.join(existing_author_names[:-1]), existing_author_names[-1]))
        out.write(
            '``Please welcome %s for whom this release marks their first '
            'contribution to Oppia!``\n\n' % new_author_comma_list)
        out.write(
            '``Thanks to %s, our returning contributors who made this release '
            'possible.``\n' % existing_author_comma_list)

        if parsed_args.personal_access_token:
            out.write('\n### Changelog: \n')
            for category in categorized_pr_titles:
                out.write('%s\n' % category)
                for pr_title in categorized_pr_titles[category]:
                    out.write('* %s\n' % pr_title)
                out.write('\n')

        out.write('\n### Commit History:\n')
        for name, title in [(log.author, log.message.split('\n\n')[0])
                            for log in new_release_logs]:
            out.write('* %s\n' % title)

        if issue_links:
            out.write('\n### Issues mentioned in commits:\n')
            for link in issue_links:
                out.write('* [%s](%s)  \n' % (link, link))

    python_utils.PRINT('Done. Summary file generated in ../release_summary.md')
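
# A standalone sketch (made-up data) of the author partition above:
# authors whose emails never appeared in past logs are "new", the rest
# are "existing".
past_authors = {'a@x.com': 'Alice'}
release_authors = {('Alice', 'a@x.com'), ('Bob', 'b@y.com')}
new_authors = sorted(
    (name, email) for name, email in release_authors
    if email not in past_authors)
assert new_authors == [('Bob', 'b@y.com')]
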
def update_developer_names(release_summary_lines):
    """Updates about-page.directive.html file.

    Args:
        release_summary_lines: list(str). List of lines in
            ../release_summary.md.
    """
    python_utils.PRINT('Updating about-page file...')
    start_index = release_summary_lines.index('### New Contributors:\n') + 1
    end_index = release_summary_lines.index(
        '### Email C&P Blurbs about authors:\n') - 1
    new_contributors = (release_summary_lines[start_index:end_index])
    new_contributors = [
        contributor.replace('* ', '') for contributor in new_contributors
    ]
    new_developer_names = [
        contributor.split('<')[0].strip() for contributor in new_contributors
    ]
    new_developer_names.sort()

    with python_utils.open_file(ABOUT_PAGE_FILEPATH, 'r') as about_page_file:
        about_page_lines = about_page_file.readlines()

        (span_indent, li_indent) = find_indentation(about_page_lines)

        developer_name_dict = collections.defaultdict(list)
        for developer_name in new_developer_names:
            developer_name_dict[developer_name[0].upper()].append(
                '%s<li>%s</li>\n' % (li_indent, developer_name))

        for char in developer_name_dict:
            # This case currently only triggers for developer names starting
            # with Q since, as of now, no listed developer's name starts
            # with a Q (i.e. the about page has no <span> section for that
            # letter yet).
            if '%s<span>%s</span>\n' % (span_indent,
                                        char) not in about_page_lines:
                prev_char = chr(ord(char) - 1)
                prev_start_index = about_page_lines.index(
                    '%s<span>%s</span>\n' % (span_indent, prev_char)) + 2
                prev_end_index = (prev_start_index +
                                  about_page_lines[prev_start_index:].index(
                                      '%s</ul>\n' % span_indent))
                developer_names = sorted(developer_name_dict[char],
                                         key=lambda s: s.lower())
                span_elem = '%s<span>%s</span>\n' % (span_indent, char)
                ul_start_elem = '%s<ul>\n' % span_indent
                ul_end_elem = '%s</ul>\n' % span_indent
                about_page_lines[prev_end_index + 1:prev_end_index +
                                 1] = ([span_elem, ul_start_elem] +
                                       developer_names + [ul_end_elem])
                continue

            start_index = about_page_lines.index('%s<span>%s</span>\n' %
                                                 (span_indent, char)) + 2
            end_index = start_index + about_page_lines[start_index:].index(
                '%s</ul>\n' % span_indent)

            old_developer_names = about_page_lines[start_index:end_index]
            updated_developer_names = list(
                set((old_developer_names + developer_name_dict[char])))
            updated_developer_names = sorted(updated_developer_names,
                                             key=lambda s: s.lower())
            about_page_lines[start_index:end_index] = updated_developer_names

    with python_utils.open_file(ABOUT_PAGE_FILEPATH, 'w') as about_page_file:
        for line in about_page_lines:
            about_page_file.write(line)
    python_utils.PRINT('Updated about-page file!')
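
# A standalone sketch (hypothetical names) of the grouping step above:
# developer names are bucketed by uppercased first letter before being
# merged into the per-letter <ul> lists of the about page.
import collections

developer_name_dict = collections.defaultdict(list)
for developer_name in ['alice', 'Bob', 'anna']:
    developer_name_dict[developer_name[0].upper()].append(developer_name)
assert developer_name_dict['A'] == ['alice', 'anna']
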
Example #18
    def check_third_party_libs_type_defs(self):
        """Checks the type definitions for third party libs
        are up to date.

        Returns:
            TaskResult. A TaskResult object representing the result of the lint
            check.
        """
        name = 'Third party type defs'

        failed = False
        error_messages = []

        manifest = json.load(
            python_utils.open_file(MANIFEST_JSON_FILE_PATH,
                                   'r'))['dependencies']['frontend']

        package = json.load(python_utils.open_file(PACKAGE_JSON_FILE_PATH,
                                                   'r'))['dependencies']

        files_in_typings_dir = os.listdir(os.path.join(os.getcwd(), 'typings'))

        for third_party_lib in THIRD_PARTY_LIBS:
            lib_dependency_source = third_party_lib['dependency_source']

            if lib_dependency_source == _DEPENDENCY_SOURCE_MANIFEST:
                lib_version = (
                    manifest[third_party_lib['dependency_key']]['version'])

            elif lib_dependency_source == _DEPENDENCY_SOURCE_PACKAGE:
                lib_version = package[third_party_lib['dependency_key']]

                if lib_version[0] == '^':
                    lib_version = lib_version[1:]

            prefix_name = third_party_lib['type_defs_filename_prefix']

            files_with_prefix_name = []

            files_with_prefix_name = [
                file_name for file_name in files_in_typings_dir
                if file_name.startswith(prefix_name)
            ]

            if len(files_with_prefix_name) > 1:
                error_message = (
                    'There are multiple type definitions for %s in the typings '
                    'dir.' % third_party_lib['name'])
                error_messages.append(error_message)
                failed = True
            elif len(files_with_prefix_name) == 0:
                error_message = (
                    'There are no type definitions for %s in the typings '
                    'dir.' % third_party_lib['name'])
                error_messages.append(error_message)
                failed = True
            else:
                type_defs_filename = files_with_prefix_name[0]

                type_defs_version = type_defs_filename[
                    len(prefix_name):-_TYPE_DEFS_FILE_EXTENSION_LENGTH]

                if lib_version != type_defs_version:
                    error_message = (
                        'Type definitions for %s are not up to date. The '
                        'current version of %s is %s and the type definitions '
                        'are for version %s. Please refer to '
                        'typings/README.md for more details.' %
                        (third_party_lib['name'], third_party_lib['name'],
                         lib_version, type_defs_version))
                    error_messages.append(error_message)
                    failed = True

        return concurrent_task_utils.TaskResult(name, failed, error_messages,
                                                error_messages)
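

# Illustrative example (assumed prefix and filename, not from the lint
# config): the version is recovered from a type definitions filename by
# stripping the prefix and the '.d.ts' extension, whose length (5 here) is
# what _TYPE_DEFS_FILE_EXTENSION_LENGTH is assumed to hold.
_EXAMPLE_PREFIX = 'third-party-lib-'
_EXAMPLE_TYPE_DEFS_FILENAME = 'third-party-lib-4.7.1.d.ts'
assert _EXAMPLE_TYPE_DEFS_FILENAME[len(_EXAMPLE_PREFIX):-5] == '4.7.1'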
def main(personal_access_token):
    """Collects necessary info and dumps it to disk.

    Args:
        personal_access_token: str. The personal access token for the user's
            GitHub ID.
    """
    if not common.is_current_branch_a_release_branch():
        raise Exception(
            'This script should only be run from the latest release branch.')
    g = github.Github(personal_access_token)
    repo = g.get_organization('oppia').get_repo('oppia')

    current_release = get_current_version_tag(repo)
    current_release_tag = current_release.name
    base_commit = current_release.commit.sha
    new_commits = get_extra_commits_in_new_release(base_commit, repo)
    new_release_logs = gather_logs(base_commit)

    # Drop logs that were cherry-picked from elsewhere, i.e. logs whose sha1
    # matches no commit in the new release. Deleting from the list while
    # iterating over it would skip entries, so build a filtered list instead.
    new_release_logs = [
        log for log in new_release_logs
        if any(log.sha1 == commit.sha for commit in new_commits)]

    past_logs = gather_logs(FIRST_OPPIA_COMMIT, stop=base_commit)
    issue_links = extract_issues(new_release_logs)
    feconf_version_changes = check_versions(current_release_tag)
    setup_changes = check_setup_scripts(current_release_tag)
    storage_changes = check_storage_models(current_release_tag)

    pr_numbers = extract_pr_numbers(new_release_logs)
    prs = get_prs_from_pr_numbers(pr_numbers, repo)
    categorized_pr_titles = get_changelog_categories(prs)

    with python_utils.open_file(release_constants.RELEASE_SUMMARY_FILEPATH,
                                'w') as out:
        out.write('## Collected release information\n')

        if feconf_version_changes:
            out.write('\n### Feconf version changes:\nThis indicates that a '
                      'migration may be needed\n\n')
            for var in feconf_version_changes:
                out.write('* %s\n' % var)

        if setup_changes:
            out.write('\n### Changed setup scripts:\n')
            for var in setup_changes.keys():
                out.write('* %s\n' % var)

        if storage_changes:
            out.write('\n### Changed storage models:\n')
            for item in storage_changes:
                out.write('* %s\n' % item)

        past_authors = {log.email: log.author for log in past_logs}
        release_authors = {(log.author, log.email) for log in new_release_logs}

        new_authors = sorted(
            set([(name, email) for name, email in release_authors
                 if email not in past_authors]))
        existing_authors = sorted(
            set([(name, email) for name, email in release_authors
                 if email in past_authors]))
        new_author_names = [name for name, _ in new_authors]
        existing_author_names = [name for name, _ in existing_authors]

        # TODO(apb7): Duplicate author handling due to email changes.
        out.write('\n%s' % release_constants.NEW_AUTHORS_HEADER)
        for name, email in new_authors:
            out.write('* %s <%s>\n' % (name, email))

        out.write('\n%s' % release_constants.EXISTING_AUTHORS_HEADER)
        for name, email in existing_authors:
            out.write('* %s <%s>\n' % (name, email))

        out.write('\n%s' % release_constants.NEW_CONTRIBUTORS_HEADER)
        for name, email in new_authors:
            out.write('* %s <%s>\n' % (name, email))

        # Generate the author sections of the email.
        out.write('\n%s' % release_constants.EMAIL_HEADER)
        # Guard the one-author case, which would otherwise render as
        # ', and <name>'.
        new_author_comma_list = (
            new_author_names[0] if len(new_author_names) == 1 else
            '%s, and %s' % (
                ', '.join(new_author_names[:-1]), new_author_names[-1]))
        existing_author_comma_list = (
            existing_author_names[0] if len(existing_author_names) == 1 else
            '%s, and %s' % (
                ', '.join(existing_author_names[:-1]),
        out.write(
            '``Please welcome %s for whom this release marks their first '
            'contribution to Oppia!``\n\n' % new_author_comma_list)
        out.write(
            '``Thanks to %s, our returning contributors who made this release '
            'possible.``\n' % existing_author_comma_list)

        if personal_access_token:
            out.write('\n%s' % release_constants.CHANGELOG_HEADER)
            for category in categorized_pr_titles:
                out.write('%s\n' % category)
                for pr_title in categorized_pr_titles[category]:
                    out.write('* %s\n' % pr_title)
                out.write('\n')

        out.write('\n%s' % release_constants.COMMIT_HISTORY_HEADER)
        for title in [log.message.split('\n\n')[0]
                      for log in new_release_logs]:
            out.write('* %s\n' % title)

        if issue_links:
            out.write('\n%s' % release_constants.ISSUES_HEADER)
            for link in issue_links:
                out.write('* [%s](%s)\n' % (link, link))

    python_utils.PRINT('Done. Summary file generated in %s' %
                       (release_constants.RELEASE_SUMMARY_FILEPATH))
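

# Minimal sketch (made-up addresses) mirroring the author partition in
# main() above: an author counts as new when their e-mail does not appear
# among the past authors.
_PAST_AUTHORS_EXAMPLE = {'ann@example.com': 'Ann'}
_RELEASE_AUTHORS_EXAMPLE = {
    ('Ann', 'ann@example.com'), ('Bo', 'bo@example.com')}
assert sorted(
    (name, email) for name, email in _RELEASE_AUTHORS_EXAMPLE
    if email not in _PAST_AUTHORS_EXAMPLE) == [('Bo', 'bo@example.com')]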
    def test_re_build_recently_changed_files_at_dev_dir(self):
        temp_file = tempfile.NamedTemporaryFile()
        temp_file_name = '%ssome_file.js' % MOCK_EXTENSIONS_DEV_DIR
        temp_file.name = temp_file_name
        with python_utils.open_file('%ssome_file.js' % MOCK_EXTENSIONS_DEV_DIR,
                                    'w') as tmp:
            tmp.write(u'Some content.')

        EXTENSIONS_DIRNAMES_TO_DIRPATHS = {
            'dev_dir': MOCK_EXTENSIONS_DEV_DIR,
            'staging_dir': os.path.join(
                TEST_DIR, 'backend_prod_files', 'extensions', ''),
            'out_dir': os.path.join(TEST_DIR, 'build', 'extensions', '')
        }

        build_dir_tasks = collections.deque()
        build_all_files_tasks = (
            build.generate_build_tasks_to_build_all_files_in_directory(
                MOCK_EXTENSIONS_DEV_DIR,
                EXTENSIONS_DIRNAMES_TO_DIRPATHS['out_dir']))
        self.assertGreater(len(build_all_files_tasks), 0)

        # Test for building all files when staging dir does not exist.
        self.assertEqual(len(build_dir_tasks), 0)
        build_dir_tasks += build.generate_build_tasks_to_build_directory(
            EXTENSIONS_DIRNAMES_TO_DIRPATHS)
        self.assertEqual(len(build_dir_tasks), len(build_all_files_tasks))

        build.safe_delete_directory_tree(TEST_DIR)
        build_dir_tasks.clear()

        # Test for building only new files when staging dir exists.
        build.ensure_directory_exists(
            EXTENSIONS_DIRNAMES_TO_DIRPATHS['staging_dir'])
        self.assertEqual(len(build_dir_tasks), 0)

        build_dir_tasks = build.generate_build_tasks_to_build_directory(
            EXTENSIONS_DIRNAMES_TO_DIRPATHS)
        file_extensions_to_always_rebuild = ('.py', '.js', '.html')
        always_rebuilt_filepaths = build.get_filepaths_by_extensions(
            MOCK_EXTENSIONS_DEV_DIR, file_extensions_to_always_rebuild)
        self.assertEqual(
            sorted(always_rebuilt_filepaths),
            sorted([
                'base.py', 'CodeRepl.py', '__init__.py', 'some_file.js',
                'DragAndDropSortInput.py', 'code_repl_prediction.html'
            ]))
        self.assertGreater(len(always_rebuilt_filepaths), 0)

        # Test that 'some_file.js' is not rebuilt, i.e. it is built for the
        # first time.
        self.assertEqual(len(build_dir_tasks), len(always_rebuilt_filepaths))
        self.assertIn('some_file.js', always_rebuilt_filepaths)
        self.assertNotIn('some_file.js', build_dir_tasks)

        build.safe_delete_directory_tree(TEST_DIR)
        temp_file.close()

        if os.path.isfile(temp_file_name):
            # On Windows systems, this temp file is occasionally not deleted.
            os.remove(temp_file_name)
def preprocess_release(app_name, deploy_data_path):
    """Pre-processes release files.

    This function should be called from within release_dir_name defined
    in execute_deployment function. Currently it does the following:

    (1) Substitutes files from the per-app deployment data.
    (2) Changes GCS_RESOURCE_BUCKET in assets/constants.ts.

    Args:
        app_name: str. Name of the app to deploy.
        deploy_data_path: str. Path for deploy data directory.

    Raises:
        Exception. Could not find deploy data directory.
        Exception. Could not find source path.
        Exception. Could not find destination path.
    """
    if not os.path.exists(deploy_data_path):
        raise Exception('Could not find deploy_data directory at %s' %
                        deploy_data_path)

    # Copies files in root folder to assets/.
    for filename in FILES_AT_ROOT:
        src = os.path.join(deploy_data_path, filename)
        dst = os.path.join(os.getcwd(), 'assets', filename)
        if not os.path.exists(src):
            raise Exception(
                'Could not find source path %s. Please check your deploy_data '
                'folder.' % src)
        if not os.path.exists(dst):
            raise Exception(
                'Could not find destination path %s. Has the code been '
                'updated in the meantime?' % dst)
        shutil.copyfile(src, dst)

    # Copies files in images to /assets/images.
    for dir_name in IMAGE_DIRS:
        src_dir = os.path.join(deploy_data_path, 'images', dir_name)
        dst_dir = os.path.join(os.getcwd(), 'assets', 'images', dir_name)

        if not os.path.exists(src_dir):
            raise Exception(
                'Could not find source dir %s. Please check your deploy_data '
                'folder.' % src_dir)
        common.ensure_directory_exists(dst_dir)

        for filename in os.listdir(src_dir):
            src = os.path.join(src_dir, filename)
            dst = os.path.join(dst_dir, filename)
            shutil.copyfile(src, dst)

    with python_utils.open_file(common.CONSTANTS_FILE_PATH,
                                'r') as assets_file:
        content = assets_file.read()

    assert '"DEV_MODE": true' in content, 'Invalid DEV_MODE'
    assert '"GCS_RESOURCE_BUCKET_NAME": "None-resources",' in content, (
        'Invalid value for GCS_RESOURCE_BUCKET_NAME in %s' %
        (common.CONSTANTS_FILE_PATH))
    bucket_name = app_name + BUCKET_NAME_SUFFIX
    common.inplace_replace_file(
        common.CONSTANTS_FILE_PATH,
        r'"GCS_RESOURCE_BUCKET_NAME": "None-resources",',
        '"GCS_RESOURCE_BUCKET_NAME": "%s",' % bucket_name)
    def setUp(self):
        super(TrainedClassifierHandlerTests, self).setUp()

        self.exp_id = 'exp_id1'
        self.title = 'Testing Classifier storing'
        self.category = 'Test'
        yaml_path = os.path.join(
            feconf.TESTS_DATA_DIR, 'string_classifier_test.yaml')
        with python_utils.open_file(yaml_path, 'r') as yaml_file:
            self.yaml_content = yaml_file.read()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.signup('*****@*****.**', 'mod')

        assets_list = []
        with self.swap(feconf, 'ENABLE_ML_CLASSIFIERS', True):
            exp_services.save_new_exploration_from_yaml_and_assets(
                feconf.SYSTEM_COMMITTER_ID, self.yaml_content, self.exp_id,
                assets_list)
        self.exploration = exp_fetchers.get_exploration_by_id(self.exp_id)
        self.algorithm_id = feconf.INTERACTION_CLASSIFIER_MAPPING[
            self.exploration.states['Home'].interaction.id]['algorithm_id']
        self.algorithm_version = feconf.INTERACTION_CLASSIFIER_MAPPING[
            self.exploration.states['Home'].interaction.id]['algorithm_version']

        self.classifier_data = {
            '_alpha': 0.1,
            '_beta': 0.001,
            '_prediction_threshold': 0.5,
            '_training_iterations': 25,
            '_prediction_iterations': 5,
            '_num_labels': 10,
            '_num_docs': 12,
            '_num_words': 20,
            '_label_to_id': {'text': 1},
            '_word_to_id': {'hello': 2},
            '_w_dp': [],
            '_b_dl': [],
            '_l_dp': [],
            '_c_dl': [],
            '_c_lw': [],
            '_c_l': [],
        }
        classifier_training_job = (
            classifier_services.get_classifier_training_job(
                self.exp_id, self.exploration.version, 'Home',
                self.algorithm_id))
        self.assertIsNotNone(classifier_training_job)
        self.job_id = classifier_training_job.job_id

        # TODO(pranavsid98): Replace the three commands below with
        # mark_training_job_pending after Giritheja's PR gets merged.
        classifier_training_job_model = (
            classifier_models.ClassifierTrainingJobModel.get(
                self.job_id, strict=False))
        classifier_training_job_model.status = (
            feconf.TRAINING_JOB_STATUS_PENDING)
        classifier_training_job_model.update_timestamps()
        classifier_training_job_model.put()

        self.job_result = (
            training_job_response_payload_pb2.TrainingJobResponsePayload.
            JobResult())
        self.job_result.job_id = self.job_id

        classifier_frozen_model = (
            text_classifier_pb2.TextClassifierFrozenModel())
        classifier_frozen_model.model_json = json.dumps(self.classifier_data)

        self.job_result.text_classifier.CopyFrom(classifier_frozen_model)

        self.payload_proto = (
            training_job_response_payload_pb2.TrainingJobResponsePayload())
        self.payload_proto.job_result.CopyFrom(self.job_result)
        self.payload_proto.vm_id = feconf.DEFAULT_VM_ID
        self.secret = feconf.DEFAULT_VM_SHARED_SECRET
        self.payload_proto.signature = classifier_services.generate_signature(
            python_utils.convert_to_bytes(self.secret),
            self.payload_proto.job_result.SerializeToString(),
            self.payload_proto.vm_id)

        self.payload_for_fetching_next_job_request = {
            'vm_id': feconf.DEFAULT_VM_ID,
            'message': json.dumps({})
        }

        self.payload_for_fetching_next_job_request['signature'] = (
            classifier_services.generate_signature(
                python_utils.convert_to_bytes(self.secret),
                self.payload_for_fetching_next_job_request['message'],
                self.payload_for_fetching_next_job_request['vm_id']))
    def test_default_interactions_are_valid(self):
        """Test that the default interactions are valid."""

        all_interaction_ids = (
            interaction_registry.Registry.get_all_interaction_ids())
        for interaction_id in all_interaction_ids:

            # Check that the interaction id is valid.
            self.assertTrue(self._is_camel_cased(interaction_id))
            hyphenated_interaction_id = (
                utils.camelcase_to_hyphenated(interaction_id))

            # Check that the interaction directory exists.
            interaction_dir = os.path.join(feconf.INTERACTIONS_DIR,
                                           interaction_id)
            self.assertTrue(os.path.isdir(interaction_dir))

            # The interaction directory should contain the following files:
            #  Required:
            #    * A python file called {InteractionName}.py.
            #    * An __init__.py file used to import the Python file.
            #    * A TypeScript file called {InteractionName}.ts.
            #    * A directory named 'directives' containing TS and HTML files
            #      for directives.
            #    * A directory named 'static' containing at least a .png file.
            #  Optional:
            #    * A JS file called protractor.js.
            interaction_dir_contents = (
                self._listdir_omit_ignored(interaction_dir))

            interaction_dir_optional_dirs_and_files_count = 0

            try:
                self.assertTrue(
                    os.path.isfile(
                        os.path.join(interaction_dir, 'protractor.js')))
                interaction_dir_optional_dirs_and_files_count += 1
            except Exception:
                pass

            try:
                self.assertTrue(
                    os.path.isfile(
                        os.path.join(
                            interaction_dir, '%s-prediction.service.ts' %
                            hyphenated_interaction_id)))
                interaction_dir_optional_dirs_and_files_count += 1
            except Exception:
                pass

            try:
                self.assertTrue(
                    os.path.isfile(
                        os.path.join(
                            interaction_dir, '%s-prediction.service.spec.ts' %
                            hyphenated_interaction_id)))
                interaction_dir_optional_dirs_and_files_count += 1
            except Exception:
                pass

            self.assertEqual(interaction_dir_optional_dirs_and_files_count + 5,
                             len(interaction_dir_contents))

            py_file = os.path.join(interaction_dir, '%s.py' % interaction_id)
            ts_file = os.path.join(interaction_dir, '%s.ts' % interaction_id)

            self.assertTrue(os.path.isfile(py_file))
            self.assertTrue(os.path.isfile(ts_file))

            # Check that __init__.py file exists.
            init_file = os.path.join(interaction_dir, '__init__.py')
            self.assertTrue(os.path.isfile(init_file))

            # Check that the directives subdirectory exists.
            directives_dir = os.path.join(interaction_dir, 'directives')
            self.assertTrue(os.path.isdir(directives_dir))

            # The directives directory should contain the following files:
            #  Required:
            #    * A TS file called
            #    oppia-interactive-{InteractionName}.directive.ts.
            #    * A TS file called
            #      oppia-response-{InteractionName}.directive.ts.
            #    * A TS file called
            #    oppia-short-response-{InteractionName}.directive.ts.
            #    * A TS file called {InteractionName}-rules.service.ts.
            #    * A TS file called {InteractionName}-validation.service.ts.
            #    * An HTML file called
            #      {InteractionName}-interaction.directive.html.
            #    * An HTML file called
            #      {InteractionName}-response.directive.html.
            #    * An HTML file called
            #      {InteractionName}-short-response.directive.html.
            #  Optional:
            #    * A TS file called
            #      {InteractionName}-validation.service.specs.ts.
            #    * A TS file called {InteractionName}-rules.service.specs.ts.

            hyphenated_interaction_id = (
                utils.camelcase_to_hyphenated(interaction_id))
            if interaction_id in INTERACTIONS_THAT_USE_COMPONENTS:
                interaction_ts_file = os.path.join(
                    directives_dir, 'oppia-interactive-%s.component.ts' %
                    (hyphenated_interaction_id))
                response_ts_file = os.path.join(
                    directives_dir, 'oppia-response-%s.component.ts' %
                    hyphenated_interaction_id)
                short_response_ts_file = os.path.join(
                    directives_dir, 'oppia-short-response-%s.component.ts' %
                    (hyphenated_interaction_id))
                rules_service_ts_file = os.path.join(
                    directives_dir,
                    '%s-rules.service.ts' % hyphenated_interaction_id)
                validation_service_ts_file = os.path.join(
                    directives_dir,
                    '%s-validation.service.ts' % hyphenated_interaction_id)
                interaction_html = os.path.join(
                    directives_dir, '%s-interaction.component.html' %
                    hyphenated_interaction_id)
                response_html = os.path.join(
                    directives_dir,
                    '%s-response.component.html' % hyphenated_interaction_id)
                short_response_html = os.path.join(
                    directives_dir, '%s-short-response.component.html' %
                    hyphenated_interaction_id)
            else:
                interaction_ts_file = os.path.join(
                    directives_dir, 'oppia-interactive-%s.directive.ts' %
                    (hyphenated_interaction_id))
                response_ts_file = os.path.join(
                    directives_dir, 'oppia-response-%s.directive.ts' %
                    hyphenated_interaction_id)
                short_response_ts_file = os.path.join(
                    directives_dir, 'oppia-short-response-%s.directive.ts' %
                    (hyphenated_interaction_id))
                rules_service_ts_file = os.path.join(
                    directives_dir,
                    '%s-rules.service.ts' % hyphenated_interaction_id)
                validation_service_ts_file = os.path.join(
                    directives_dir,
                    '%s-validation.service.ts' % hyphenated_interaction_id)
                interaction_html = os.path.join(
                    directives_dir, '%s-interaction.directive.html' %
                    hyphenated_interaction_id)
                response_html = os.path.join(
                    directives_dir,
                    '%s-response.directive.html' % hyphenated_interaction_id)
                short_response_html = os.path.join(
                    directives_dir, '%s-short-response.directive.html' %
                    hyphenated_interaction_id)

            self.assertTrue(os.path.isfile(interaction_ts_file))
            self.assertTrue(os.path.isfile(response_ts_file))
            self.assertTrue(os.path.isfile(short_response_ts_file))
            self.assertTrue(os.path.isfile(interaction_html))
            self.assertTrue(os.path.isfile(response_html))
            self.assertTrue(os.path.isfile(short_response_html))
            self.assertTrue(os.path.isfile(rules_service_ts_file))
            self.assertTrue(os.path.isfile(validation_service_ts_file))

            # Check that the PNG thumbnail image has the correct dimensions.
            static_dir = os.path.join(interaction_dir, 'static')
            self.assertTrue(os.path.isdir(static_dir))
            png_file = os.path.join(interaction_dir, 'static',
                                    '%s.png' % interaction_id)
            self.assertTrue(os.path.isfile(png_file))
            with python_utils.open_file(png_file, 'rb', encoding=None) as f:
                img_data = f.read()
                width, height = struct.unpack('>LL', img_data[16:24])
                self.assertEqual(int(width), INTERACTION_THUMBNAIL_WIDTH_PX)
                self.assertEqual(int(height), INTERACTION_THUMBNAIL_HEIGHT_PX)

            interaction_ts_file_content = utils.get_file_contents(
                interaction_ts_file)
            response_ts_file_content = utils.get_file_contents(
                response_ts_file)
            short_response_ts_file_content = (
                utils.get_file_contents(short_response_ts_file))
            ts_file_content = utils.get_file_contents(ts_file)
            rules_service_ts_file_content = utils.get_file_contents(
                rules_service_ts_file)
            validation_service_ts_file_content = utils.get_file_contents(
                validation_service_ts_file)

            self.assertIn('oppiaInteractive%s' % interaction_id,
                          interaction_ts_file_content)
            self.assertIn('oppiaResponse%s' % interaction_id,
                          response_ts_file_content)
            self.assertIn('oppiaShortResponse%s' % interaction_id,
                          short_response_ts_file_content)
            self.assertIn('%sRulesService' % interaction_id,
                          rules_service_ts_file_content)
            self.assertIn('%sValidationService' % interaction_id,
                          validation_service_ts_file_content)

            # Check that the HTML template includes the JS script for the
            # interaction.
            self.assertTrue('oppia-interactive-%s.component.ts' %
                            hyphenated_interaction_id in ts_file_content
                            or ('oppia-interactive-%s.directive.ts' %
                                hyphenated_interaction_id in ts_file_content))
            self.assertTrue('oppia-response-%s.component.ts' %
                            hyphenated_interaction_id in ts_file_content
                            or ('oppia-response-%s.directive.ts' %
                                hyphenated_interaction_id in ts_file_content))
            self.assertTrue('oppia-short-response-%s.component.ts' %
                            hyphenated_interaction_id in ts_file_content
                            or ('oppia-short-response-%s.directive.ts' %
                                hyphenated_interaction_id in ts_file_content))
            self.assertIn('%s-rules.service.ts' % hyphenated_interaction_id,
                          ts_file_content)
            self.assertIn(
                '%s-validation.service.ts' % hyphenated_interaction_id,
                ts_file_content)

            self.assertNotIn('<script>', interaction_ts_file_content)
            self.assertNotIn('</script>', interaction_ts_file_content)
            self.assertNotIn('<script>', response_ts_file_content)
            self.assertNotIn('</script>', response_ts_file_content)
            self.assertNotIn('<script>', short_response_ts_file_content)
            self.assertNotIn('</script>', short_response_ts_file_content)
            self.assertNotIn('<script>', rules_service_ts_file_content)
            self.assertNotIn('</script>', rules_service_ts_file_content)
            self.assertNotIn('<script>', validation_service_ts_file_content)
            self.assertNotIn('</script>', validation_service_ts_file_content)

            interaction = interaction_registry.Registry.get_interaction_by_id(
                interaction_id)

            # Check that the specified interaction id is the same as the class
            # name.
            self.assertEqual(interaction_id, interaction.__class__.__name__)

            # Check that the configuration file contains the correct
            # top-level keys, and that these keys have the correct types.
            for item, item_type in _INTERACTION_CONFIG_SCHEMA:
                self.assertTrue(
                    isinstance(getattr(interaction, item), item_type))
                if item_type == python_utils.BASESTRING:
                    self.assertTrue(getattr(interaction, item))

            self.assertIn(interaction.display_mode, base.ALLOWED_DISPLAY_MODES)

            if interaction.is_linear or interaction.is_terminal:
                self.assertIsNone(interaction.answer_type)
            else:
                # Check that the answer_type corresponds to a valid object
                # class.
                obj_services.Registry.get_object_class_by_type(
                    interaction.answer_type)

            self._validate_customization_arg_specs(
                interaction.customization_arg_specs)

            answer_visualization_specs = (
                interaction.answer_visualization_specs)
            self._validate_answer_visualization_specs(
                answer_visualization_specs)

            answer_visualizations = interaction.answer_visualizations
            for ind, visualization in enumerate(answer_visualizations):
                self.assertEqual(visualization.id,
                                 answer_visualization_specs[ind]['id'])
                self.assertEqual(
                    visualization.calculation_id,
                    answer_visualization_specs[ind]['calculation_id'])
                self.assertEqual(visualization.options,
                                 answer_visualization_specs[ind]['options'])

                # Check that the derived visualization is valid.
                visualization.validate()

            # Check that supplemental interactions have instructions, and
            # inline ones do not.
            if interaction.display_mode == base.DISPLAY_MODE_INLINE:
                self.assertIsNone(interaction.instructions)
                self.assertIsNone(interaction.narrow_instructions)
            else:
                self.assertTrue(
                    isinstance(interaction.instructions,
                               python_utils.BASESTRING))
                self.assertIsNotNone(interaction.instructions)
                self.assertIsNotNone(interaction.narrow_instructions)

            # Check that terminal interactions are not linear.
            if interaction.is_terminal:
                self.assertFalse(interaction.is_linear)

            # Check that only linear interactions have a
            # default_outcome_heading property.
            if interaction.is_linear:
                self.assertTrue(
                    isinstance(interaction.default_outcome_heading,
                               python_utils.BASESTRING)
                    and interaction.default_outcome_heading)
            else:
                self.assertIsNone(interaction.default_outcome_heading)

            # Check that interactions that can have solution cannot be linear.
            if interaction.can_have_solution:
                self.assertFalse(interaction.is_linear)

            default_object_values = obj_services.get_default_object_values()

            # Check that the rules for this interaction have object editor
            # templates and default values.
            for rule_name in list(interaction.rules_dict.keys()):
                param_list = interaction.get_rule_param_list(rule_name)

                for (_, param_obj_cls) in param_list:
                    # TODO(sll): Get rid of these special cases.
                    if param_obj_cls.__name__ in [
                            'NonnegativeInt', 'ListOfCodeEvaluation',
                            'ListOfCoordTwoDim', 'ListOfGraph',
                            'SetOfNormalizedString'
                    ]:
                        continue

                    # Check that the rule has a default value.
                    self.assertIn(param_obj_cls.__name__,
                                  default_object_values)
    def test_export_account_handler(self):
        # Update user settings to constants.
        user_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
        user_settings = user_services.get_user_settings(user_id)
        user_settings.last_agreed_to_terms = self.GENERIC_DATE
        user_settings.last_logged_in = self.GENERIC_DATE
        user_settings.validate()
        user_models.UserSettingsModel(
            id=user_settings.user_id,
            gae_id=user_settings.gae_id,
            email=user_settings.email,
            role=user_settings.role,
            username=user_settings.username,
            normalized_username=user_settings.normalized_username,
            last_agreed_to_terms=user_settings.last_agreed_to_terms,
            last_started_state_editor_tutorial=(
                user_settings.last_started_state_editor_tutorial),
            last_started_state_translation_tutorial=(
                user_settings.last_started_state_translation_tutorial),
            last_logged_in=user_settings.last_logged_in,
            last_edited_an_exploration=user_settings.
            last_edited_an_exploration,
            last_created_an_exploration=(
                user_settings.last_created_an_exploration),
            profile_picture_data_url=user_settings.profile_picture_data_url,
            default_dashboard=user_settings.default_dashboard,
            creator_dashboard_display_pref=(
                user_settings.creator_dashboard_display_pref),
            user_bio=user_settings.user_bio,
            subject_interests=user_settings.subject_interests,
            first_contribution_msec=user_settings.first_contribution_msec,
            preferred_language_codes=user_settings.preferred_language_codes,
            preferred_site_language_code=(
                user_settings.preferred_site_language_code),
            preferred_audio_language_code=(
                user_settings.preferred_audio_language_code),
            deleted=user_settings.deleted).put()

        constants_swap = self.swap(constants, 'ENABLE_ACCOUNT_EXPORT', True)
        time_swap = self.swap(user_services, 'record_user_logged_in',
                              lambda *args: None)

        with constants_swap, time_swap:
            data = self.get_custom_response('/export-account-handler',
                                            'text/plain')

            # Check downloaded zip file.
            filename = 'oppia_takeout_data.zip'
            self.assertEqual(data.headers['Content-Disposition'],
                             'attachment; filename=%s' % filename)
            zf_saved = zipfile.ZipFile(
                python_utils.string_io(buffer_value=data.body))
            self.assertEqual(zf_saved.namelist(), [
                'oppia_takeout_data.json',
                'images/user_settings_profile_picture.png'
            ])

            # Load golden zip file.
            golden_zip_filepath = os.path.join(feconf.TESTS_DATA_DIR,
                                               'oppia_takeout_data.zip')
            with python_utils.open_file(golden_zip_filepath,
                                        'rb',
                                        encoding=None) as f:
                golden_zipfile = f.read()
            zf_gold = zipfile.ZipFile(
                python_utils.string_io(buffer_value=golden_zipfile))

            self.assertEqual(
                zf_saved.open('oppia_takeout_data.json').read(),
                zf_gold.open('oppia_takeout_data.json').read())
            self.assertEqual(
                zf_saved.open(
                    'images/user_settings_profile_picture.png').read(),
                zf_gold.open(
                    'images/user_settings_profile_picture.png').read())
import json
import os
import re

import python_utils


def parse_json_from_js(js_file):
    """Extracts JSON object from JS file."""
    text = js_file.read()
    text_without_comments = remove_comments(text)
    first_bracket_index = text_without_comments.find('= {')
    last_bracket_index = text_without_comments.rfind('}')
    json_text = (text_without_comments[first_bracket_index +
                                       2:last_bracket_index + 1])
    return json.loads(json_text)


def remove_comments(text):
    """Removes comments from given text."""
    return re.sub(r'  //.*\n', r'', text)


class Constants(dict):
    """Transforms dict to object, attributes can be accessed by dot notation."""

    __getattr__ = dict.__getitem__


with python_utils.open_file(os.path.join('assets', 'constants.ts'), 'r') as f:
    constants = Constants(parse_json_from_js(f))
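
# Minimal usage sketch (assumed sample text, not a real Oppia asset file):
# any object with a read() method works here, so io.StringIO stands in for
# an open file, and Constants exposes the parsed keys via dot notation.
import io

_sample = Constants(parse_json_from_js(
    io.StringIO(u'export = {\n  "DEV_MODE": true  // example\n};\n')))
assert _sample.DEV_MODE is True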
    def test_function_calls_on_windows(self):
        check_function_calls = {
            'ensure_pip_library_is_installed_is_called': False,
            'install_third_party_main_is_called': False,
            'setup_main_is_called': False,
            'setup_gae_main_is_called': False,
            'pre_commit_hook_main_is_called': False,
            'pre_push_hook_main_is_called': False,
            'tweak_yarn_executable_is_called': False
        }
        expected_check_function_calls = {
            'ensure_pip_library_is_installed_is_called': True,
            'install_third_party_main_is_called': True,
            'setup_main_is_called': True,
            'setup_gae_main_is_called': True,
            'pre_commit_hook_main_is_called': True,
            'pre_push_hook_main_is_called': False,
            'tweak_yarn_executable_is_called': True
        }

        def mock_ensure_pip_library_is_installed(unused_package,
                                                 unused_version, unused_path):
            check_function_calls[
                'ensure_pip_library_is_installed_is_called'] = True

        def mock_check_call(unused_cmd_tokens):
            pass

        def mock_main_for_install_third_party(args):  # pylint: disable=unused-argument
            check_function_calls['install_third_party_main_is_called'] = True

        def mock_main_for_setup(args):  # pylint: disable=unused-argument
            check_function_calls['setup_main_is_called'] = True

        def mock_main_for_setup_gae(args):  # pylint: disable=unused-argument
            check_function_calls['setup_gae_main_is_called'] = True

        def mock_main_for_pre_commit_hook(args):  # pylint: disable=unused-argument
            check_function_calls['pre_commit_hook_main_is_called'] = True

        def mock_main_for_pre_push_hook(args):  # pylint: disable=unused-argument
            check_function_calls['pre_push_hook_main_is_called'] = True

        def mock_tweak_yarn_executable():
            check_function_calls['tweak_yarn_executable_is_called'] = True

        ensure_pip_install_swap = self.swap(
            install_third_party_libs, 'ensure_pip_library_is_installed',
            mock_ensure_pip_library_is_installed)
        check_call_swap = self.swap(subprocess, 'check_call', mock_check_call)
        install_third_party_main_swap = self.swap(
            install_third_party, 'main', mock_main_for_install_third_party)
        setup_main_swap = self.swap(setup, 'main', mock_main_for_setup)
        setup_gae_main_swap = self.swap(setup_gae, 'main',
                                        mock_main_for_setup_gae)
        pre_commit_hook_main_swap = self.swap(pre_commit_hook, 'main',
                                              mock_main_for_pre_commit_hook)
        pre_push_hook_main_swap = self.swap(pre_push_hook, 'main',
                                            mock_main_for_pre_push_hook)
        tweak_yarn_executable_swap = self.swap(install_third_party_libs,
                                               'tweak_yarn_executable',
                                               mock_tweak_yarn_executable)
        os_name_swap = self.swap(common, 'OS_NAME', 'Windows')

        py_actual_text = ('ConverterMapping,\nLine ending with '
                          '"ConverterMapping",\nOther Line\n')
        py_expected_text = ('Line ending with \nOther Line\n')
        temp_py_config_file = tempfile.NamedTemporaryFile(prefix='py').name
        with python_utils.open_file(temp_py_config_file, 'w') as f:
            f.write(py_actual_text)

        pq_actual_text = (
            'ConverterMapping,\n"ConverterMapping",\nOther Line\n')
        pq_expected_text = ('Other Line\n')
        temp_pq_config_file = tempfile.NamedTemporaryFile(prefix='pq').name
        with python_utils.open_file(temp_pq_config_file, 'w') as f:
            f.write(pq_actual_text)

        py_config_swap = self.swap(install_third_party_libs,
                                   'PYLINT_CONFIGPARSER_FILEPATH',
                                   temp_py_config_file)
        pq_config_swap = self.swap(install_third_party_libs,
                                   'PQ_CONFIGPARSER_FILEPATH',
                                   temp_pq_config_file)

        with ensure_pip_install_swap, check_call_swap:
            with install_third_party_main_swap, setup_main_swap:
                with setup_gae_main_swap, pre_commit_hook_main_swap:
                    with pre_push_hook_main_swap, py_config_swap:
                        with pq_config_swap, tweak_yarn_executable_swap:
                            with os_name_swap:
                                install_third_party_libs.main()
        self.assertEqual(check_function_calls, expected_check_function_calls)
        with python_utils.open_file(temp_py_config_file, 'r') as f:
            self.assertEqual(f.read(), py_expected_text)
        with python_utils.open_file(temp_pq_config_file, 'r') as f:
            self.assertEqual(f.read(), pq_expected_text)
def _execute_deployment():
    """Executes the deployment process after doing the prerequisite checks.

    Raises:
        Exception: The deployment script is not run from a release branch.
        Exception: Current release version has '.' character.
        Exception: The mailgun API key is not added before deployment.
        Exception: Could not find third party directory.
        Exception: Invalid directory accessed during deployment.
        Exception: All the indexes have not been served before deployment.
        Exception: Build failed.
        Exception: Issue in library page loading.
        Exception: There is a major breakage.
    """

    install_third_party_libs.main(args=[])

    if not common.is_current_branch_a_release_branch():
        raise Exception(
            'The deployment script must be run from a release branch.')
    current_release_version = CURRENT_BRANCH_NAME[
        len(common.RELEASE_BRANCH_NAME_PREFIX):].replace('.', '-')

    # Safety net: '.' is not allowed in the release_version_library_url, and
    # should already have been replaced with '-' above.
    if '.' in current_release_version:
        raise Exception('Current release version has \'.\' character.')

    indexes_page_url = ('https://console.cloud.google.com/datastore/indexes'
                        '?project=%s') % APP_NAME
    release_version_library_url = ('https://%s-dot-%s.appspot.com/library' %
                                   (current_release_version, APP_NAME))
    memcache_url = ('https://pantheon.corp.google.com/appengine/memcache?'
                    'project=%s') % APP_NAME
    test_server_error_logs_url = (
        'https://console.cloud.google.com/logs/viewer?'
        'project=%s&key1=default&minLogLevel=500') % APP_NAME
    release_journal_url = ('https://drive.google.com/drive/folders/'
                           '0B9KSjiibL_WDNjJyYlEtbTNvY3c')
    issue_filing_url = 'https://github.com/oppia/oppia/milestone/39'

    # Do prerequisite checks.
    common.require_cwd_to_be_oppia()
    common.ensure_release_scripts_folder_exists_and_is_up_to_date()
    gcloud_adapter.require_gcloud_to_be_available()
    if APP_NAME in [APP_NAME_OPPIASERVER, APP_NAME_OPPIATESTSERVER]:
        if not common.is_current_branch_a_release_branch():
            raise Exception(
                'The deployment script must be run from a release branch.')
    if APP_NAME == APP_NAME_OPPIASERVER:
        with python_utils.open_file('./feconf.py', 'r') as f:
            feconf_contents = f.read()
            if ('MAILGUN_API_KEY' not in feconf_contents
                    or 'MAILGUN_API_KEY = None' in feconf_contents):
                raise Exception(
                    'The mailgun API key must be added before deployment.')
    if not os.path.exists(THIRD_PARTY_DIR):
        raise Exception(
            'Could not find third_party directory at %s. Please run '
            'install_third_party_libs.py prior to running this script.' %
            THIRD_PARTY_DIR)

    current_git_revision = subprocess.check_output(
        ['git', 'rev-parse', 'HEAD']).strip()

    # Create a folder in which to save the release candidate.
    python_utils.PRINT('Ensuring that the release directory parent exists')
    common.ensure_directory_exists(os.path.dirname(RELEASE_DIR_PATH))

    # Copy files to the release directory. Omits the .git subfolder.
    python_utils.PRINT('Copying files to the release directory')
    shutil.copytree(os.getcwd(),
                    RELEASE_DIR_PATH,
                    ignore=shutil.ignore_patterns('.git'))

    # Change the current directory to the release candidate folder.
    with common.CD(RELEASE_DIR_PATH):
        if not os.getcwd().endswith(RELEASE_DIR_NAME):
            raise Exception(
                'Invalid directory accessed during deployment: %s' %
                os.getcwd())

        python_utils.PRINT('Changing directory to %s' % os.getcwd())

        python_utils.PRINT('Preprocessing release...')
        preprocess_release()

        # Update indexes, then prompt for a check that they are all serving
        # before continuing with the deployment.
        # NOTE: This assumes that the build process does not modify the
        # index.yaml file or create a different version of it to use in
        # production.
        gcloud_adapter.update_indexes(INDEX_YAML_PATH, APP_NAME)
        if not gcloud_adapter.check_all_indexes_are_serving(APP_NAME):
            common.open_new_tab_in_browser_if_possible(indexes_page_url)
            raise Exception(
                'Please wait for all indexes to serve, then run this '
                'script again to complete the deployment. For details, '
                'visit the indexes page. Exiting.')

        # Do a build, while outputting to the terminal.
        python_utils.PRINT('Building and minifying scripts...')
        build_process = subprocess.Popen(
            ['python', '-m', 'scripts.build', '--prod_env'],
            stdout=subprocess.PIPE)
        while True:
            line = build_process.stdout.readline()
            # Only an empty bytestring signals EOF; a blank line of output
            # is '\n' and should not end the loop early.
            if not line:
                break
            python_utils.PRINT(line.strip())
        # Wait for the process to terminate, then check the return code.
        build_process.communicate()
        if build_process.returncode > 0:
            raise Exception('Build failed.')

        # Deploy export service to GAE.
        gcloud_adapter.deploy_application('export/app.yaml', APP_NAME)
        # Deploy app to GAE.
        gcloud_adapter.deploy_application(
            './app.yaml',
            APP_NAME,
            version=(CUSTOM_VERSION
                     if CUSTOM_VERSION else current_release_version))

        # Writing log entry.
        common.ensure_directory_exists(os.path.dirname(LOG_FILE_PATH))
        with python_utils.open_file(LOG_FILE_PATH, 'a') as log_file:
            log_file.write(
                'Successfully deployed to %s at %s (version %s)\n' %
                (APP_NAME, CURRENT_DATETIME.strftime('%Y-%m-%d %H:%M:%S'),
                 current_git_revision))

        python_utils.PRINT('Returning to oppia/ root directory.')

    library_page_loads_correctly = check_errors_in_a_page(
        release_version_library_url, 'Library page is loading correctly?')
    if library_page_loads_correctly:
        gcloud_adapter.switch_version(APP_NAME, current_release_version)
        python_utils.PRINT('Successfully migrated traffic to release version!')
    else:
        raise Exception(
            'Aborting version switch due to issues in library page '
            'loading.')

    if not gcloud_adapter.flush_memcache(APP_NAME):
        python_utils.PRINT('Memcache flushing failed. Please do it manually.')
        common.open_new_tab_in_browser_if_possible(memcache_url)

    # If this is a test server deployment and the current release version is
    # already serving, open the library page (for sanity checking) and the GAE
    # error logs.
    currently_served_version = (
        gcloud_adapter.get_currently_served_version(APP_NAME))
    if (APP_NAME == APP_NAME_OPPIATESTSERVER
            or 'migration' in APP_NAME) and (currently_served_version
                                             == current_release_version):
        major_breakage = check_errors_in_a_page(test_server_error_logs_url,
                                                'Is anything major broken?')
        if major_breakage:
            common.open_new_tab_in_browser_if_possible(release_journal_url)
            common.open_new_tab_in_browser_if_possible(issue_filing_url)
            raise Exception(
                'Please note the issue in the release journal for this month, '
                'file a blocking bug and switch to the last known good '
                'version.')

    python_utils.PRINT('Done!')
def make_graph():
    """Creates an adjaceny list considering services as node and dependencies
    as edges.

    Returns:
        tuple(dict, set(str)). Adjancency list of the graph formed with
        services as nodes and dependencies as edges, set of all the services.
    """
    adj_list = collections.defaultdict(list)
    nodes_set = set()
    for dirname in DIRECTORY_NAMES:
        for root, _, filenames in os.walk(dirname):
            for filename in filenames:
                if filename.endswith(SERVICE_FILES_SUFFICES):
                    nodes_set.add(filename)
                    filepath = os.path.join(root, filename)
                    with python_utils.open_file(filepath, 'r') as f:
                        file_lines = f.readlines()

                    dep_lines = ''
                    index = 0

                    while index < len(file_lines):
                        line = file_lines[index]
                        if line.startswith('require'):
                            while not line.endswith(';\n'):
                                dep_lines = dep_lines + line
                                index += 1
                                line = file_lines[index]
                            dep_lines = dep_lines + line
                            index += 1
                        elif line.startswith('import'):
                            while not line.endswith(';\n'):
                                index += 1
                                line = file_lines[index]
                                if '\'' in line:
                                    break

                            dep_lines = dep_lines + (
                                'require (' +
                                line[line.find('\''):line.rfind('\'') + 1] +
                                ');\n')
                            index += 1
                        else:
                            index += 1

                    parsed_script = esprima.parseScript(dep_lines,
                                                        comment=True)
                    parsed_nodes = parsed_script.body
                    for parsed_node in parsed_nodes:
                        # For require statements.
                        if parsed_node.type == 'ExpressionStatement' and (
                                parsed_node.expression.callee.name
                                == ('require')):
                            arguments = parsed_node.expression.arguments
                            for argument in arguments:
                                dep_path = argument.value
                                if argument.operator == '+':
                                    dep_path = (argument.left.value +
                                                argument.right.value)
                                if not dep_path.endswith('.ts'):
                                    dep_path = dep_path + '.ts'
                                if dep_path.endswith(SERVICE_FILES_SUFFICES):
                                    dep_name = os.path.basename(dep_path)
                                    adj_list[dep_name].append(filename)

    return (adj_list, nodes_set)
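

# Minimal sketch (assumed input string) of the esprima step above: a single
# normalized 'require' line parses to an ExpressionStatement whose callee
# is named 'require' and whose first argument carries the dependency path.
_parsed_example = esprima.parseScript("require('foo.service.ts');")
_example_call = _parsed_example.body[0].expression
assert _example_call.callee.name == 'require'
assert _example_call.arguments[0].value == 'foo.service.ts'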
def preprocess_release():
    """Pre-processes release files.

    This function should be called from within RELEASE_DIR_NAME. Currently it
    does the following:

    (1) Substitutes files from the per-app deployment data.
    (2) Changes the DEV_MODE constant in assets/constants.ts.
    (3) Changes GCS_RESOURCE_BUCKET in assets/constants.ts.
    (4) Removes the "version" field from app.yaml, since gcloud does not like
        it (when deploying).

    Raises:
        Exception: Could not find deploy data directory.
        Exception: Could not find source path.
        Exception: Could not find destination path.
    """
    if not os.path.exists(DEPLOY_DATA_PATH):
        raise Exception(
            'Could not find deploy_data directory at %s' % DEPLOY_DATA_PATH)

    # Copies files in root folder to assets/.
    for filename in FILES_AT_ROOT:
        src = os.path.join(DEPLOY_DATA_PATH, filename)
        dst = os.path.join(os.getcwd(), 'assets', filename)
        if not os.path.exists(src):
            raise Exception(
                'Could not find source path %s. Please check your deploy_data '
                'folder.' % src)
        if not os.path.exists(dst):
            raise Exception(
                'Could not find destination path %s. Has the code been '
                'updated in the meantime?' % dst)
        shutil.copyfile(src, dst)

    # Copies files in images to /assets/images.
    for dir_name in IMAGE_DIRS:
        src_dir = os.path.join(DEPLOY_DATA_PATH, 'images', dir_name)
        dst_dir = os.path.join(os.getcwd(), 'assets', 'images', dir_name)

        if not os.path.exists(src_dir):
            raise Exception(
                'Could not find source dir %s. Please check your deploy_data '
                'folder.' % src_dir)
        common.ensure_directory_exists(dst_dir)

        for filename in os.listdir(src_dir):
            src = os.path.join(src_dir, filename)
            dst = os.path.join(dst_dir, filename)
            shutil.copyfile(src, dst)

    # Changes the DEV_MODE constant in assets/constants.ts.
    with python_utils.open_file(
        os.path.join('assets', 'constants.ts'), 'r') as assets_file:
        content = assets_file.read()
    bucket_name = APP_NAME + BUCKET_NAME_SUFFIX
    assert '"DEV_MODE": true' in content
    assert '"GCS_RESOURCE_BUCKET_NAME": "None-resources",' in content
    os.remove(os.path.join('assets', 'constants.ts'))
    content = content.replace('"DEV_MODE": true', '"DEV_MODE": false')
    content = content.replace(
        '"GCS_RESOURCE_BUCKET_NAME": "None-resources",',
        '"GCS_RESOURCE_BUCKET_NAME": "%s",' % bucket_name)
    with python_utils.open_file(
        os.path.join('assets', 'constants.ts'), 'w+') as new_assets_file:
        new_assets_file.write(content)
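

# Illustrative check (made-up app name 'testapp'; BUCKET_NAME_SUFFIX comes
# from the surrounding module): the two replace() calls above flip DEV_MODE
# and substitute the GCS bucket name derived from the app name.
_example_content = (
    '"DEV_MODE": true\n'
    '"GCS_RESOURCE_BUCKET_NAME": "None-resources",\n')
_example_content = _example_content.replace(
    '"DEV_MODE": true', '"DEV_MODE": false')
_example_content = _example_content.replace(
    '"GCS_RESOURCE_BUCKET_NAME": "None-resources",',
    '"GCS_RESOURCE_BUCKET_NAME": "%s",' % ('testapp' + BUCKET_NAME_SUFFIX))
assert '"DEV_MODE": false' in _example_content
assert '"GCS_RESOURCE_BUCKET_NAME": "testapp%s",' % BUCKET_NAME_SUFFIX in (
    _example_content)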
    def test_finds_hanging_indent(self):
        checker_test_object = testutils.CheckerTestCase()
        checker_test_object.CHECKER_CLASS = (
            pylint_extensions.HangingIndentChecker)
        checker_test_object.setup_method()
        node_break_after_hanging_indent = astroid.scoped_nodes.Module(
            name='test', doc='Custom test')
        temp_file = tempfile.NamedTemporaryFile()
        filename = temp_file.name
        with python_utils.open_file(filename, 'w') as tmp:
            tmp.write(u"""self.post_json('/ml/trainedclassifierhandler',
                self.payload, expect_errors=True, expected_status_int=401)
                """)
        node_break_after_hanging_indent.file = filename
        node_break_after_hanging_indent.path = filename

        checker_test_object.checker.process_module(
            node_break_after_hanging_indent)

        with checker_test_object.assertAddsMessages(
                testutils.Message(msg_id='no-break-after-hanging-indent',
                                  line=1), ):
            temp_file.close()

        node_with_no_error_message = astroid.scoped_nodes.Module(
            name='test', doc='Custom test')

        temp_file = tempfile.NamedTemporaryFile()
        filename = temp_file.name
        with python_utils.open_file(filename, 'w') as tmp:
            tmp.write(u"""\"""Some multiline
                docstring.
                \"""
                # Load JSON.
                master_translation_dict = json.loads(
                utils.get_file_contents(os.path.join(
                os.getcwd(), 'assets', 'i18n', 'en.json')))
                """)
        node_with_no_error_message.file = filename
        node_with_no_error_message.path = filename

        checker_test_object.checker.process_module(node_with_no_error_message)

        with checker_test_object.assertNoMessages():
            temp_file.close()

        node_with_no_error_message = astroid.scoped_nodes.Module(
            name='test', doc='Custom test')

        temp_file = tempfile.NamedTemporaryFile()
        filename = temp_file.name
        with python_utils.open_file(filename, 'w') as tmp:
            tmp.write(u"""self.post_json('/',
                self.payload, expect_errors=True, expected_status_int=401)""")
        node_with_no_error_message.file = filename
        node_with_no_error_message.path = filename

        checker_test_object.checker.process_module(node_with_no_error_message)

        with checker_test_object.assertNoMessages():
            temp_file.close()