Example #1
def main(options):
    # type: (Any) -> int

    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # hash the apt dependencies
    sha_sum = hashlib.sha1()

    for apt_dependency in SYSTEM_DEPENDENCIES:
        sha_sum.update(apt_dependency.encode('utf8'))
    if vendor in ["Ubuntu", "Debian"]:
        sha_sum.update(open('scripts/lib/setup-apt-repo', 'rb').read())
    else:
        # hash the content of setup-yum-repo and build-*
        sha_sum.update(open('scripts/lib/setup-yum-repo', 'rb').read())
        build_paths = glob.glob("scripts/lib/build-*")
        for bp in build_paths:
            sha_sum.update(open(bp, 'rb').read())

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
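    # Opening with 'a+' creates the file on first run; seek(0) rewinds so
    # we can read whatever hash a previous provision recorded.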
    with open(apt_hash_file_path, 'a+') as hash_file:
        hash_file.seek(0)
        last_apt_dependencies_hash = hash_file.read()

    if (new_apt_dependencies_hash != last_apt_dependencies_hash):
        try:
            install_system_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            install_system_deps(retry=True)
        with open(apt_hash_file_path, 'w') as hash_file:
            hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
    proxy_env = [
        "env",
        "http_proxy=" + os.environ.get("http_proxy", ""),
        "https_proxy=" + os.environ.get("https_proxy", ""),
        "no_proxy=" + os.environ.get("no_proxy", ""),
    ]
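    # sudo normally resets the environment, so we re-export the caller's
    # proxy settings via `env` for the node download.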
    run_as_root(proxy_env + ["scripts/lib/install-node"], sudo_args=['-H'])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        # Hack: We remove `node_modules` as root to work around an
        # issue with the symlinks being improperly owned by root.
        if os.path.islink("node_modules"):
            run_as_root(["rm", "-f", "node_modules"])
        run_as_root(["mkdir", "-p", NODE_MODULES_CACHE_PATH])
        run_as_root(["chown", "%s:%s" % (user_id, user_id), NODE_MODULES_CACHE_PATH])
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        setup_node_modules()

    # Install shellcheck.
    run_as_root(["scripts/lib/install-shellcheck"])

    from tools.setup import setup_venvs
    setup_venvs.main()

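    # Activate the virtualenv in this Python process by exec'ing its
    # activate_this.py, so subsequent imports resolve inside the venv.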
    activate_this = "/srv/zulip-py3-venv/bin/activate_this.py"
    exec(open(activate_this).read(), {}, dict(__file__=activate_this))

    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    # This needs to happen before anything that imports zproject.settings.
    run(["scripts/setup/generate_secrets.py", "--development"])

    run_as_root(["cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # create log directory `zulip/var/log`
    os.makedirs(LOG_DIR_PATH, exist_ok=True)
    # create upload directory `var/uploads`
    os.makedirs(UPLOAD_DIR_PATH, exist_ok=True)
    # create test upload directory `var/test_upload`
    os.makedirs(TEST_UPLOAD_DIR_PATH, exist_ok=True)
    # create coverage directory `var/coverage`
    os.makedirs(COVERAGE_DIR_PATH, exist_ok=True)
    # create linecoverage directory `var/node-coverage`
    os.makedirs(NODE_TEST_COVERAGE_DIR_PATH, exist_ok=True)

    # The `build_emoji` script requires the `emoji-datasource` package,
    # which we install via npm; thus this step must come after the npm
    # packages are installed.
    if not os.path.isdir(EMOJI_CACHE_PATH):
        run_as_root(["mkdir", EMOJI_CACHE_PATH])
    run_as_root(["chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    generate_zulip_bots_static_files()

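    # Each generation step below is skipped when the recorded hash of its
    # input files (and any relevant package versions) is unchanged since
    # the last provision, unless --force was passed.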
    webfont_paths = ["tools/setup/generate-custom-icon-webfont", "static/icons/fonts/template.hbs"]
    webfont_paths += glob.glob('static/assets/icons/*')
    if file_or_package_hash_updated(webfont_paths, "webfont_files_hash", options.is_force):
        run(["tools/setup/generate-custom-icon-webfont"])
    else:
        print("No need to run `tools/setup/generate-custom-icon-webfont`.")

    build_pygments_data_paths = ["tools/setup/build_pygments_data", "tools/setup/lang.json"]
    from pygments import __version__ as pygments_version
    if file_or_package_hash_updated(build_pygments_data_paths, "build_pygments_data_hash", options.is_force,
                                    [pygments_version]):
        run(["tools/setup/build_pygments_data"])
    else:
        print("No need to run `tools/setup/build_pygments_data`.")

    update_authors_json_paths = ["tools/update-authors-json", "zerver/tests/fixtures/authors.json"]
    if file_or_package_hash_updated(update_authors_json_paths, "update_authors_json_hash", options.is_force):
        run(["tools/update-authors-json", "--use-fixture"])
    else:
        print("No need to run `tools/update-authors-json`.")

    email_source_paths = ["tools/inline-email-css", "templates/zerver/emails/email.css"]
    email_source_paths += glob.glob('templates/zerver/emails/*.source.html')
    if file_or_package_hash_updated(email_source_paths, "last_email_source_files_hash", options.is_force):
        run(["tools/inline-email-css"])
    else:
        print("No need to run `tools/inline-email-css`.")

    if is_circleci or (is_travis and not options.is_production_travis):
        run_as_root(["service", "rabbitmq-server", "restart"])
        run_as_root(["service", "redis-server", "restart"])
        run_as_root(["service", "memcached", "restart"])
        run_as_root(["service", "postgresql", "restart"])
    elif family == 'redhat':
        for service in ["postgresql-%s" % (POSTGRES_VERSION,), "rabbitmq-server", "memcached", "redis"]:
            run_as_root(["systemctl", "enable", service], sudo_args = ['-H'])
            run_as_root(["systemctl", "start", service], sudo_args = ['-H'])
    elif options.is_docker:
        run_as_root(["service", "rabbitmq-server", "restart"])
        run_as_root(["pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run_as_root(["pg_createcluster", "-e", "utf8", "--start", POSTGRES_VERSION, "main"])
        run_as_root(["service", "redis-server", "restart"])
        run_as_root(["service", "memcached", "restart"])
    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using template_database_status
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import template_database_status, run_db_migrations

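        # Probe RabbitMQ by opening a queue connection; any exception
        # here means it still needs to be configured.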
        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        migration_status_path = os.path.join(UUID_VAR_PATH, "migration_status_dev")
        dev_template_db_status = template_database_status(
            migration_status=migration_status_path,
            settings="zproject.settings",
            database_name="zulip",
        )
        if options.is_force or dev_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        elif dev_template_db_status == 'run_migrations':
            run_db_migrations('dev')
        elif dev_template_db_status == 'current':
            print("No need to regenerate the dev DB.")

        test_template_db_status = template_database_status()
        if options.is_force or test_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        elif test_template_db_status == 'run_migrations':
            run_db_migrations('test')
        elif test_template_db_status == 'current':
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('static/locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('static/locale/*/translations.json')

        if file_or_package_hash_updated(paths, "last_compilemessages_hash", options.is_force):
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

    run(["scripts/lib/clean-unused-caches"])

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file,))
    open(version_file, 'w').write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
Example #2
def main(options):
    # type: (argparse.Namespace) -> NoReturn

    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # hash the apt dependencies
    sha_sum = hashlib.sha1()

    for apt_dependency in SYSTEM_DEPENDENCIES:
        sha_sum.update(apt_dependency.encode('utf8'))
    if vendor in ["ubuntu", "debian"]:
        sha_sum.update(open('scripts/lib/setup-apt-repo', 'rb').read())
    else:
        # hash the content of setup-yum-repo and build-*
        sha_sum.update(open('scripts/lib/setup-yum-repo', 'rb').read())
        build_paths = glob.glob("scripts/lib/build-*")
        for bp in build_paths:
            sha_sum.update(open(bp, 'rb').read())

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
    with open(apt_hash_file_path, 'a+') as hash_file:
        hash_file.seek(0)
        last_apt_dependencies_hash = hash_file.read()

    if (new_apt_dependencies_hash != last_apt_dependencies_hash):
        try:
            install_system_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            print(WARNING +
                  "Installing system dependencies failed; retrying..." + ENDC)
            install_system_deps()
        with open(apt_hash_file_path, 'w') as hash_file:
            hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
    proxy_env = [
        "env",
        "http_proxy=" + os.environ.get("http_proxy", ""),
        "https_proxy=" + os.environ.get("https_proxy", ""),
        "no_proxy=" + os.environ.get("no_proxy", ""),
    ]
    run_as_root(proxy_env + ["scripts/lib/install-node"], sudo_args=['-H'])

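    # Make sure the yarn cache directory exists and is writable by the
    # current (non-root) user before running yarn.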
    if not os.access(NODE_MODULES_CACHE_PATH, os.W_OK):
        run_as_root(["mkdir", "-p", NODE_MODULES_CACHE_PATH])
        run_as_root([
            "chown",
            "%s:%s" % (os.getuid(), os.getgid()), NODE_MODULES_CACHE_PATH
        ])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        try:
            setup_node_modules()
        except subprocess.CalledProcessError:
            print(
                FAIL +
                "`yarn install` is failing; check your network connection (and proxy settings)."
                + ENDC)
            sys.exit(1)

    # Install shellcheck.
    run_as_root(["scripts/lib/install-shellcheck"])

    setup_venvs.main()

    run_as_root(["cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    if is_circleci or (is_travis and not options.is_production_travis):
        run_as_root(["service", "rabbitmq-server", "restart"])
        run_as_root(["service", "redis-server", "restart"])
        run_as_root(["service", "memcached", "restart"])
        run_as_root(["service", "postgresql", "restart"])
    elif family == 'redhat':
        for service in [
                "postgresql-%s" % (POSTGRES_VERSION, ), "rabbitmq-server",
                "memcached", "redis"
        ]:
            run_as_root(["systemctl", "enable", service], sudo_args=['-H'])
            run_as_root(["systemctl", "start", service], sudo_args=['-H'])

    # If we imported modules after activating the virtualenv in this
    # Python process, they could end up mismatching with modules we’ve
    # already imported from outside the virtualenv.  That seems like a
    # bad idea, and empirically it can cause Python to segfault on
    # certain cffi-related imports.  Instead, start a new Python
    # process inside the virtualenv.
    activate_this = "/srv/zulip-py3-venv/bin/activate_this.py"
    provision_inner = os.path.join(ZULIP_PATH, "tools", "lib",
                                   "provision_inner.py")
    exec(open(activate_this).read(), dict(__file__=activate_this))
    os.execvp(provision_inner, [
        provision_inner,
        *(["--force"] if options.is_force else []),
        *(["--production-travis"] if options.is_production_travis else []),
    ])
Example #3
def main(options):
    # type: (Any) -> int

    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # setup-apt-repo does an `apt-get update`
    # hash the apt dependencies
    sha_sum = hashlib.sha1()

    for apt_dependency in APT_DEPENDENCIES[codename]:
        sha_sum.update(apt_dependency.encode('utf8'))
    # hash the content of setup-apt-repo
    sha_sum.update(open('scripts/lib/setup-apt-repo', 'rb').read())

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
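    # Read the hash recorded by a previous provision, if any; on a first
    # run, create the file and leave last_apt_dependencies_hash as None
    # so the comparison below installs everything.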
    try:
        with open(apt_hash_file_path, 'r') as hash_file:
            last_apt_dependencies_hash = hash_file.read()
    except IOError:
        run(['touch', apt_hash_file_path])

    if (new_apt_dependencies_hash != last_apt_dependencies_hash):
        try:
            install_apt_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            print(
                WARNING +
                "`apt-get -y install` failed while installing dependencies; retrying..."
                + ENDC)
            # Since a common failure mode is for the caching in
            # `setup-apt-repo` to optimize the fast code path to skip
            # running `apt-get update` when the target apt repository
            # is out of date, we run it explicitly here so that we
            # recover automatically.
            run(['sudo', 'apt-get', 'update'])
            install_apt_deps()
        with open(apt_hash_file_path, 'w') as hash_file:
            hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
    run(["sudo", "-H", "scripts/lib/install-node"])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        # Hack: We remove `node_modules` as root to work around an
        # issue with the symlinks being improperly owned by root.
        if os.path.islink("node_modules"):
            run(["sudo", "rm", "-f", "node_modules"])
        run(["sudo", "mkdir", "-p", NODE_MODULES_CACHE_PATH])
        run([
            "sudo", "chown",
            "%s:%s" % (user_id, user_id), NODE_MODULES_CACHE_PATH
        ])
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        setup_node_modules()

    # Import tools/setup/setup_venvs.py instead of running it so that we
    # get an activated virtualenv for the rest of the provisioning process.
    from tools.setup import setup_venvs
    setup_venvs.main()

    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    run(["sudo", "cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # create log directory `zulip/var/log`
    run(["mkdir", "-p", LOG_DIR_PATH])
    # create upload directory `var/uploads`
    run(["mkdir", "-p", UPLOAD_DIR_PATH])
    # create test upload directory `var/test_upload`
    run(["mkdir", "-p", TEST_UPLOAD_DIR_PATH])
    # create coverage directory `var/coverage`
    run(["mkdir", "-p", COVERAGE_DIR_PATH])
    # create linecoverage directory `var/node-coverage`
    run(["mkdir", "-p", NODE_TEST_COVERAGE_DIR_PATH])

    # The `build_emoji` script requires the `emoji-datasource` package, which
    # we install via npm, so it must be executed after the npm packages are
    # installed.
    if not os.path.isdir(EMOJI_CACHE_PATH):
        run(["sudo", "mkdir", EMOJI_CACHE_PATH])
    run(["sudo", "chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    run(["tools/setup/generate_zulip_bots_static_files"])

    run(["tools/generate-custom-icon-webfont"])
    run(["tools/setup/build_pygments_data"])
    run(["scripts/setup/generate_secrets.py", "--development"])
    run(["tools/update-authors-json", "--use-fixture"])
    email_source_paths = [
        "tools/inline-email-css", "templates/zerver/emails/email.css"
    ]
    email_source_paths += glob.glob('templates/zerver/emails/*.source.html')
    if file_hash_updated(email_source_paths, "last_email_source_files_hash",
                         options.is_force):
        run(["tools/inline-email-css"])
    else:
        print("No need to run `tools/inline-email-css`.")
    if is_circleci or (is_travis and not options.is_production_travis):
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
        run(["sudo", "service", "postgresql", "restart"])
    elif options.is_docker:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run([
            "sudo", "pg_createcluster", "-e", "utf8", "--start",
            POSTGRES_VERSION, "main"
        ])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using template_database_status
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import template_database_status, run_db_migrations

        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        migration_status_path = os.path.join(UUID_VAR_PATH,
                                             "migration_status_dev")
        dev_template_db_status = template_database_status(
            migration_status=migration_status_path,
            settings="zproject.settings",
            database_name="zulip",
        )
        if options.is_force or dev_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        elif dev_template_db_status == 'run_migrations':
            run_db_migrations('dev')
        elif dev_template_db_status == 'current':
            print("No need to regenerate the dev DB.")

        test_template_db_status = template_database_status()
        if options.is_force or test_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        elif test_template_db_status == 'run_migrations':
            run_db_migrations('test')
        elif test_template_db_status == 'current':
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('static/locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('static/locale/*/translations.json')

        if file_hash_updated(paths, "last_compilemessages_hash",
                             options.is_force):
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

    run(["scripts/lib/clean-unused-caches"])

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file, ))
    open(version_file, 'w').write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
Example #4
def main(options: argparse.Namespace) -> "NoReturn":

    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # hash the apt dependencies
    sha_sum = hashlib.sha1()

    for apt_dependency in SYSTEM_DEPENDENCIES:
        sha_sum.update(apt_dependency.encode())
    if "debian" in os_families():
        with open("scripts/lib/setup-apt-repo", "rb") as fb:
            sha_sum.update(fb.read())
    else:
        # hash the content of setup-yum-repo*
        with open("scripts/lib/setup-yum-repo", "rb") as fb:
            sha_sum.update(fb.read())

    # hash the content of build-pgroonga if PGroonga is built from source
    if BUILD_PGROONGA_FROM_SOURCE:
        with open("scripts/lib/build-pgroonga", "rb") as fb:
            sha_sum.update(fb.read())

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
    with open(apt_hash_file_path, "a+") as hash_file:
        hash_file.seek(0)
        last_apt_dependencies_hash = hash_file.read()

    if new_apt_dependencies_hash != last_apt_dependencies_hash:
        try:
            install_system_deps()
        except subprocess.CalledProcessError:
            try:
                # Might be a failure due to network connection issues. Retrying...
                print(WARNING + "Installing system dependencies failed; retrying..." + ENDC)
                install_system_deps()
            except BaseException as e:
                # Suppress exception chaining
                raise e from None
        with open(apt_hash_file_path, "w") as hash_file:
            hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
    proxy_env = [
        "env",
        "http_proxy=" + os.environ.get("http_proxy", ""),
        "https_proxy=" + os.environ.get("https_proxy", ""),
        "no_proxy=" + os.environ.get("no_proxy", ""),
    ]
    run_as_root([*proxy_env, "scripts/lib/install-node"], sudo_args=["-H"])
    run_as_root([*proxy_env, "scripts/lib/install-yarn"])

    if not os.access(NODE_MODULES_CACHE_PATH, os.W_OK):
        run_as_root(["mkdir", "-p", NODE_MODULES_CACHE_PATH])
        run_as_root(["chown", f"{os.getuid()}:{os.getgid()}", NODE_MODULES_CACHE_PATH])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        try:
            setup_node_modules()
        except subprocess.CalledProcessError:
            print(
                FAIL
                + "`yarn install` is failing; check your network connection (and proxy settings)."
                + ENDC
            )
            sys.exit(1)

    # Install shellcheck.
    run_as_root(["tools/setup/install-shellcheck"])
    # Install shfmt.
    run_as_root(["tools/setup/install-shfmt"])

    setup_venvs.main()

    run_as_root(["cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    if CONTINUOUS_INTEGRATION and not options.is_build_release_tarball_only:
        run_as_root(["service", "redis-server", "start"])
        run_as_root(["service", "memcached", "start"])
        run_as_root(["service", "rabbitmq-server", "start"])
        run_as_root(["service", "postgresql", "start"])
    elif "fedora" in os_families():
        # These platforms don't enable and start services on
        # installing their package, so we do that here.
        for service in [
            f"postgresql-{POSTGRESQL_VERSION}",
            "rabbitmq-server",
            "memcached",
            "redis",
        ]:
            run_as_root(["systemctl", "enable", service], sudo_args=["-H"])
            run_as_root(["systemctl", "start", service], sudo_args=["-H"])

    # If we imported modules after activating the virtualenv in this
    # Python process, they could end up mismatching with modules we’ve
    # already imported from outside the virtualenv.  That seems like a
    # bad idea, and empirically it can cause Python to segfault on
    # certain cffi-related imports.  Instead, start a new Python
    # process inside the virtualenv.
    activate_this = "/srv/zulip-py3-venv/bin/activate_this.py"
    provision_inner = os.path.join(ZULIP_PATH, "tools", "lib", "provision_inner.py")
    with open(activate_this) as f:
        exec(f.read(), dict(__file__=activate_this))
    os.execvp(
        provision_inner,
        [
            provision_inner,
            *(["--force"] if options.is_force else []),
            *(["--build-release-tarball-only"] if options.is_build_release_tarball_only else []),
            *(["--skip-dev-db-build"] if options.skip_dev_db_build else []),
        ],
    )
Example #5
def main(options):
    # type: (Any) -> int

    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # setup-apt-repo does an `apt-get update`
    # hash the apt dependencies
    sha_sum = hashlib.sha1()

    for apt_dependency in APT_DEPENDENCIES[codename]:
        sha_sum.update(apt_dependency.encode('utf8'))
    # hash the content of setup-apt-repo
    sha_sum.update(open('scripts/lib/setup-apt-repo', 'rb').read())

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
    try:
        with open(apt_hash_file_path, 'r') as hash_file:
            last_apt_dependencies_hash = hash_file.read()
    except IOError:
        run(['touch', apt_hash_file_path])

    if (new_apt_dependencies_hash != last_apt_dependencies_hash):
        try:
            install_apt_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            print(WARNING + "`apt-get -y install` failed while installing dependencies; retrying..." + ENDC)
            # Since a common failure mode is for the caching in
            # `setup-apt-repo` to optimize the fast code path to skip
            # running `apt-get update` when the target apt repository
            # is out of date, we run it explicitly here so that we
            # recover automatically.
            run(['sudo', 'apt-get', 'update'])
            install_apt_deps()
        with open(apt_hash_file_path, 'w') as hash_file:
            hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
    run(["sudo", "-H", "scripts/lib/install-node"])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        # Hack: We remove `node_modules` as root to work around an
        # issue with the symlinks being improperly owned by root.
        if os.path.islink("node_modules"):
            run(["sudo", "rm", "-f", "node_modules"])
        run(["sudo", "mkdir", "-p", NODE_MODULES_CACHE_PATH])
        run(["sudo", "chown", "%s:%s" % (user_id, user_id), NODE_MODULES_CACHE_PATH])
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        setup_node_modules()

    # Import tools/setup/setup_venvs.py instead of running it so that we
    # get an activated virtualenv for the rest of the provisioning process.
    from tools.setup import setup_venvs
    setup_venvs.main()

    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    run(["sudo", "cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # create log directory `zulip/var/log`
    run(["mkdir", "-p", LOG_DIR_PATH])
    # create upload directory `var/uploads`
    run(["mkdir", "-p", UPLOAD_DIR_PATH])
    # create test upload directory `var/test_upload`
    run(["mkdir", "-p", TEST_UPLOAD_DIR_PATH])
    # create coverage directory `var/coverage`
    run(["mkdir", "-p", COVERAGE_DIR_PATH])
    # create linecoverage directory `var/linecoverage-report`
    run(["mkdir", "-p", LINECOVERAGE_DIR_PATH])
    # create linecoverage directory `var/node-coverage`
    run(["mkdir", "-p", NODE_TEST_COVERAGE_DIR_PATH])

    # The `build_emoji` script requires the `emoji-datasource` package, which
    # we install via npm, so it must be executed after the npm packages are
    # installed.
    if not os.path.isdir(EMOJI_CACHE_PATH):
        run(["sudo", "mkdir", EMOJI_CACHE_PATH])
    run(["sudo", "chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    run(["tools/setup/generate_zulip_bots_static_files"])

    run(["tools/generate-custom-icon-webfont"])
    run(["tools/setup/build_pygments_data"])
    run(["scripts/setup/generate_secrets.py", "--development"])
    run(["tools/update-authors-json", "--use-fixture"])
    run(["tools/inline-email-css"])
    if is_circleci or (is_travis and not options.is_production_travis):
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
        run(["sudo", "service", "postgresql", "restart"])
    elif options.is_docker:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run(["sudo", "pg_createcluster", "-e", "utf8", "--start", POSTGRES_VERSION, "main"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using is_template_database_current
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import is_template_database_current

        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        migration_status_path = os.path.join(UUID_VAR_PATH, "migration_status_dev")
        if options.is_force or not is_template_database_current(
                migration_status=migration_status_path,
                settings="zproject.settings",
                database_name="zulip",
        ):
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        else:
            print("No need to regenerate the dev DB.")

        if options.is_force or not is_template_database_current():
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        else:
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.
        sha1sum = hashlib.sha1()
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('static/locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('static/locale/*/translations.json')

        for path in paths:
            with open(path, 'rb') as file_to_hash:
                sha1sum.update(file_to_hash.read())

        compilemessages_hash_path = os.path.join(UUID_VAR_PATH, "last_compilemessages_hash")
        new_compilemessages_hash = sha1sum.hexdigest()
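        # `touch` guarantees the hash file exists, so the read below
        # succeeds even on a first run (yielding an empty string).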
        run(['touch', compilemessages_hash_path])
        with open(compilemessages_hash_path, 'r') as hash_file:
            last_compilemessages_hash = hash_file.read()

        if options.is_force or (new_compilemessages_hash != last_compilemessages_hash):
            with open(compilemessages_hash_path, 'w') as hash_file:
                hash_file.write(new_compilemessages_hash)
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

        run(["./manage.py", "create_realm_internal_bots"])  # Creates realm internal bots if required.

    run(["scripts/lib/clean-unused-caches"])

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file,))
    open(version_file, 'w').write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
Example #6
def main():
    # type: () -> int

    # npm install and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    run(["sudo", "./scripts/lib/setup-apt-repo"])
    # Add groonga repository to get the pgroonga packages
    run(["sudo", "add-apt-repository", "-y", "ppa:groonga/ppa"])
    run(["sudo", "apt-get", "update"])
    run(["sudo", "apt-get", "-y", "install", "--no-install-recommends"] + APT_DEPENDENCIES[codename])

    if TRAVIS:
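        # On Travis, set up the specific virtualenvs each suite needs
        # directly, rather than importing setup_venvs as in the local
        # path below.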
        if PY2:
            MYPY_REQS_FILE = os.path.join(ZULIP_PATH, "requirements", "mypy.txt")
            setup_virtualenv(PY3_VENV_PATH, MYPY_REQS_FILE, patch_activate_script=True,
                             virtualenv_args=['-p', 'python3'])
            DEV_REQS_FILE = os.path.join(ZULIP_PATH, "requirements", "py2_dev.txt")
            setup_virtualenv(PY2_VENV_PATH, DEV_REQS_FILE, patch_activate_script=True)
        else:
            TWISTED_REQS_FILE = os.path.join(ZULIP_PATH, "requirements", "twisted.txt")
            setup_virtualenv("/srv/zulip-py2-twisted-venv", TWISTED_REQS_FILE,
                             patch_activate_script=True)
            DEV_REQS_FILE = os.path.join(ZULIP_PATH, "requirements", "py3_dev.txt")
            setup_virtualenv(VENV_PATH, DEV_REQS_FILE, patch_activate_script=True,
                             virtualenv_args=['-p', 'python3'])
    else:
        # Import tools/setup_venv.py instead of running it so that we get an
        # activated virtualenv for the rest of the provisioning process.
        from tools.setup import setup_venvs
        setup_venvs.main()

    # Put Python2 virtualenv activation in our .bash_profile.
    with open(os.path.expanduser('~/.bash_profile'), 'w+') as bash_profile:
        bash_profile.writelines([
            "source .bashrc\n",
            "source %s\n" % (os.path.join(VENV_PATH, "bin", "activate"),),
        ])

    run(["sudo", "cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # create log directory `zulip/var/log`
    run(["mkdir", "-p", LOG_DIR_PATH])
    # create upload directory `var/uploads`
    run(["mkdir", "-p", UPLOAD_DIR_PATH])
    # create test upload directory `var/test_upload`
    run(["mkdir", "-p", TEST_UPLOAD_DIR_PATH])
    # create coverage directory `var/coverage`
    run(["mkdir", "-p", COVERAGE_DIR_PATH])
    # create linecoverage directory `var/linecoverage-report`
    run(["mkdir", "-p", LINECOVERAGE_DIR_PATH])
    # create linecoverage directory `var/node-coverage`
    run(["mkdir", "-p", NODE_TEST_COVERAGE_DIR_PATH])

    if TRAVIS:
        run(["tools/setup/install-phantomjs", "--travis"])
    else:
        run(["tools/setup/install-phantomjs"])
    run(["tools/setup/download-zxcvbn"])
    run(["tools/setup/emoji_dump/build_emoji"])
    run(["scripts/setup/generate_secrets.py", "-d"])
    if TRAVIS and not PRODUCTION_TRAVIS:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    elif "--docker" in sys.argv:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run(["sudo", "pg_createcluster", "-e", "utf8", "--start", POSTGRES_VERSION, "main"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    if not PRODUCTION_TRAVIS:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment.
        run(["scripts/setup/configure-rabbitmq"])
        run(["tools/setup/postgres-init-dev-db"])
        run(["tools/do-destroy-rebuild-database"])
        run(["tools/setup/postgres-init-test-db"])
        run(["tools/do-destroy-rebuild-test-database"])
        run(["python", "./manage.py", "compilemessages"])
    # Install the pinned version of npm.
    install_npm()
    # Run npm install last because it can be flaky, and that way one
    # only needs to rerun `npm install` to fix the installation.
    try:
        setup_node_modules()
    except subprocess.CalledProcessError:
        print(WARNING + "`npm install` failed; retrying..." + ENDC)
        setup_node_modules()

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0