import os
import subprocess
import sys
import time
from contextlib import contextmanager
from typing import Iterator, Optional

# Helper functions referenced below (set_up_django, is_template_database_current,
# server_is_up, assert_server_running) are defined elsewhere in the surrounding
# module and are assumed to be in scope.

@contextmanager
def test_server_running(force: bool = False,
                        external_host: str = 'testserver',
                        log_file: Optional[str] = None,
                        dots: bool = False,
                        use_db: bool = True) -> Iterator[None]:
    log = sys.stdout
    if log_file:
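        # Append to an existing, reasonably small log file; otherwise truncate it.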
        if os.path.exists(log_file) and os.path.getsize(log_file) < 100000:
            log = open(log_file, 'a')
            log.write('\n\n')
        else:
            log = open(log_file, 'w')

    set_up_django(external_host)

    if use_db:
        generate_fixtures_command = ['tools/setup/generate-fixtures']
        if not is_template_database_current():
            generate_fixtures_command.append('--force')
        subprocess.check_call(generate_fixtures_command)

    # Run this not through the shell, so that we have the actual PID.
    run_dev_server_command = ['tools/run-dev.py', '--test']
    if force:
        run_dev_server_command.append('--force')
    server = subprocess.Popen(run_dev_server_command, stdout=log, stderr=log)

    try:
        # Wait for the server to start up.
        sys.stdout.write('\nWaiting for test server (may take a while)')
        if not dots:
            sys.stdout.write('\n\n')
        while not server_is_up(server, log_file):
            if dots:
                sys.stdout.write('.')
                sys.stdout.flush()
            time.sleep(0.1)
        sys.stdout.write('\n\n--- SERVER IS UP! ---\n\n')

        # DO OUR ACTUAL TESTING HERE!!!
        yield

    finally:
        assert_server_running(server, log_file)
        server.terminate()
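A minimal usage sketch for the context manager above (hypothetical; it assumes the helper functions referenced in the body are importable in scope, and the smoke-test URL is an assumption, not taken from the snippet):

def run_smoke_test() -> None:
    # The @contextmanager generator keeps the test server up for the
    # duration of the with block and tears it down afterwards.
    with test_server_running(log_file='var/test-server.log', dots=True):
        import urllib.request
        # Hypothetical smoke-test request; adjust host/port to your setup.
        urllib.request.urlopen('http://127.0.0.1:9981/')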
Example #2
def main(options):
    # type: (Any) -> int
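    # Note: module-level names used below (ZULIP_PATH, APT_DEPENDENCIES,
    # codename, run, user_id, the various *_PATH constants, and the
    # WARNING/OKBLUE/ENDC color codes) are defined earlier in Zulip's
    # provision script and are assumed to be in scope here.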

    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # Hash the apt dependency list together with the contents of
    # `scripts/lib/setup-apt-repo` (which itself runs `apt-get update`),
    # so we can skip the apt steps entirely when nothing has changed.
    sha_sum = hashlib.sha1()

    for apt_dependency in APT_DEPENDENCIES[codename]:
        sha_sum.update(apt_dependency.encode('utf8'))
    # Hash the content of setup-apt-repo as well.
    with open('scripts/lib/setup-apt-repo', 'rb') as setup_apt_repo_file:
        sha_sum.update(setup_apt_repo_file.read())

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
    try:
        with open(apt_hash_file_path, 'r') as hash_file:
            last_apt_dependencies_hash = hash_file.read()
    except IOError:
        run(['touch', apt_hash_file_path])

    if new_apt_dependencies_hash != last_apt_dependencies_hash:
        try:
            install_apt_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            print(WARNING + "`apt-get -y install` failed while installing dependencies; retrying..." + ENDC)
            # Since a common failure mode is for the caching in
            # `setup-apt-repo` to optimize the fast code path to skip
            # running `apt-get update` when the target apt repository
            # is out of date, we run it explicitly here so that we
            # recover automatically.
            run(['sudo', 'apt-get', 'update'])
            install_apt_deps()
        with open(apt_hash_file_path, 'w') as hash_file:
            hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
    run(["sudo", "-H", "scripts/lib/install-node"])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        # Hack: We remove `node_modules` as root to work around an
        # issue with the symlinks being improperly owned by root.
        if os.path.islink("node_modules"):
            run(["sudo", "rm", "-f", "node_modules"])
        run(["sudo", "mkdir", "-p", NODE_MODULES_CACHE_PATH])
        run(["sudo", "chown", "%s:%s" % (user_id, user_id), NODE_MODULES_CACHE_PATH])
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        setup_node_modules()

    # Import tools/setup_venv.py instead of running it so that we get an
    # activated virtualenv for the rest of the provisioning process.
    from tools.setup import setup_venvs
    setup_venvs.main()

    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    run(["sudo", "cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # Create the various var/ directories the development environment uses.
    run(["mkdir", "-p", LOG_DIR_PATH])                 # zulip/var/log
    run(["mkdir", "-p", UPLOAD_DIR_PATH])              # var/uploads
    run(["mkdir", "-p", TEST_UPLOAD_DIR_PATH])         # var/test_upload
    run(["mkdir", "-p", COVERAGE_DIR_PATH])            # var/coverage
    run(["mkdir", "-p", LINECOVERAGE_DIR_PATH])        # var/linecoverage-report
    run(["mkdir", "-p", NODE_TEST_COVERAGE_DIR_PATH])  # var/node-coverage

    # `build_emoji` script requires `emoji-datasource` package which we install
    # via npm and hence it should be executed after we are done installing npm
    # packages.
    if not os.path.isdir(EMOJI_CACHE_PATH):
        run(["sudo", "mkdir", EMOJI_CACHE_PATH])
    run(["sudo", "chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    run(["tools/setup/generate_zulip_bots_static_files"])

    run(["tools/generate-custom-icon-webfont"])
    run(["tools/setup/build_pygments_data"])
    run(["scripts/setup/generate_secrets.py", "--development"])
    run(["tools/update-authors-json", "--use-fixture"])
    run(["tools/inline-email-css"])
    if is_circleci or (is_travis and not options.is_production_travis):
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
        run(["sudo", "service", "postgresql", "restart"])
    elif options.is_docker:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run(["sudo", "pg_createcluster", "-e", "utf8", "--start", POSTGRES_VERSION, "main"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using is_template_database_current
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import is_template_database_current

        # Probe whether RabbitMQ is already usable: if opening a queue
        # connection fails for any reason, we (re)configure it below.
        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        migration_status_path = os.path.join(UUID_VAR_PATH, "migration_status_dev")
        if options.is_force or not is_template_database_current(
                migration_status=migration_status_path,
                settings="zproject.settings",
                database_name="zulip",
        ):
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        else:
            print("No need to regenerate the dev DB.")

        if options.is_force or not is_template_database_current():
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        else:
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.
        sha1sum = hashlib.sha1()
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('static/locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('static/locale/*/translations.json')

        for path in paths:
            with open(path, 'rb') as file_to_hash:
                sha1sum.update(file_to_hash.read())

        compilemessages_hash_path = os.path.join(UUID_VAR_PATH, "last_compilemessages_hash")
        new_compilemessages_hash = sha1sum.hexdigest()
        run(['touch', compilemessages_hash_path])
        with open(compilemessages_hash_path, 'r') as hash_file:
            last_compilemessages_hash = hash_file.read()

        if options.is_force or (new_compilemessages_hash != last_compilemessages_hash):
            with open(compilemessages_hash_path, 'w') as hash_file:
                hash_file.write(new_compilemessages_hash)
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

        run(["./manage.py", "create_realm_internal_bots"])  # Creates realm internal bots if required.

    run(["scripts/lib/clean-unused-caches"])

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file,))
    with open(version_file, 'w') as version_fh:
        version_fh.write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
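A minimal, hypothetical driver for main() above. The flag names are assumptions modeled on the attributes the function reads (options.is_force, options.is_production_travis, options.is_docker); they are not taken from the snippet:

if __name__ == "__main__":
    import argparse
    import sys

    parser = argparse.ArgumentParser(description="Provision the Zulip development environment")
    parser.add_argument('--force', action='store_true', dest='is_force',
                        help="run the expensive steps even if the cached hashes match")
    parser.add_argument('--production-travis', action='store_true', dest='is_production_travis',
                        help="provision for the production Travis suite")
    parser.add_argument('--docker', action='store_true', dest='is_docker',
                        help="provision inside a Docker container")
    sys.exit(main(parser.parse_args()))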