Example #1
def do_setup_virtualenv(venv_path, requirements_file, virtualenv_args):
    # type: (str, str, List[str]) -> None

    # Setup Python virtualenv
    new_packages = set(get_package_names(requirements_file))

    run(["sudo", "rm", "-rf", venv_path])
    if not try_to_copy_venv(venv_path, new_packages):
        # Create new virtualenv.
        run(["sudo", "mkdir", "-p", venv_path])
        run(["sudo", "virtualenv"] + virtualenv_args + [venv_path])
        run(["sudo", "chown", "-R",
             "{}:{}".format(os.getuid(), os.getgid()), venv_path])
        create_log_entry(get_logfile_name(venv_path), "", set(), new_packages)

    create_requirements_index_file(venv_path, requirements_file)
    # Switch current Python context to the virtualenv.
    activate_this = os.path.join(venv_path, "bin", "activate_this.py")
    exec(open(activate_this).read(), {}, dict(__file__=activate_this))

    try:
        install_venv_deps(requirements_file)
    except subprocess.CalledProcessError:
        # Might be a failure due to network connection issues. Retrying...
        print(WARNING + "`pip install` failed; retrying..." + ENDC)
        install_venv_deps(requirements_file)
    run(["sudo", "chmod", "-R", "a+rX", venv_path])
Example #2
def setup_node_modules(npm_args=None, stdout=None, stderr=None, copy_modules=False):
    # type: (Optional[List[str]], Optional[IO], Optional[IO], Optional[bool]) -> None
    sha1sum = hashlib.sha1()
    sha1sum.update(subprocess_text_output(["cat", "package.json"]).encode("utf8"))
    sha1sum.update(subprocess_text_output(["npm", "--version"]).encode("utf8"))
    sha1sum.update(subprocess_text_output(["node", "--version"]).encode("utf8"))
    if npm_args is not None:
        sha1sum.update("".join(sorted(npm_args)).encode("utf8"))

    npm_cache = os.path.join(NPM_CACHE_PATH, sha1sum.hexdigest())
    cached_node_modules = os.path.join(npm_cache, "node_modules")
    success_stamp = os.path.join(cached_node_modules, ".success-stamp")
    # Check if a cached version already exists
    if not os.path.exists(success_stamp):
        do_npm_install(
            npm_cache,
            npm_args or [],
            stdout=stdout,
            stderr=stderr,
            success_stamp=success_stamp,
            copy_modules=copy_modules,
        )

    print("Using cached node modules from %s" % (cached_node_modules,))
    cmds = [["rm", "-rf", "node_modules"], ["ln", "-nsf", cached_node_modules, "node_modules"]]
    for cmd in cmds:
        run(cmd, stdout=stdout, stderr=stderr)
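Note: Example #2 keys its cache directory off a SHA-1 of package.json plus the npm and node versions, so any change to either the manifest or the toolchain invalidates the cache. A standalone sketch of that scheme (paths and commands are illustrative; requires node/npm on PATH):

import hashlib
import subprocess

def node_modules_cache_key(manifest: str = "package.json") -> str:
    sha1 = hashlib.sha1()
    with open(manifest, "rb") as f:
        sha1.update(f.read())  # the dependency manifest itself
    for cmd in (["npm", "--version"], ["node", "--version"]):
        sha1.update(subprocess.check_output(cmd))  # the toolchain versions
    return sha1.hexdigest()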
Example #3
def install_apt_deps():
    # type: () -> None
    # setup-apt-repo does an `apt-get update`
    run(["sudo", "./scripts/lib/setup-apt-repo"])
    # By doing list -> set -> list conversion we remove duplicates.
    deps_to_install = list(set(APT_DEPENDENCIES[codename]))
    run(["sudo", "apt-get", "-y", "install", "--no-install-recommends"] + deps_to_install)
Example #4
def setup_node_modules(production=DEFAULT_PRODUCTION, stdout=None, stderr=None, copy_modules=False,
                       prefer_offline=False):
    # type: (bool, Optional[IO], Optional[IO], bool, bool) -> None
    yarn_args = get_yarn_args(production=production)
    if prefer_offline:
        yarn_args.append("--prefer-offline")
    sha1sum = generate_sha1sum_node_modules(production=production)
    target_path = os.path.join(NODE_MODULES_CACHE_PATH, sha1sum)
    cached_node_modules = os.path.join(target_path, 'node_modules')
    success_stamp = os.path.join(target_path, '.success-stamp')
    # Check if a cached version already exists
    if not os.path.exists(success_stamp):
        do_yarn_install(target_path,
                        yarn_args,
                        success_stamp,
                        stdout=stdout,
                        stderr=stderr,
                        copy_modules=copy_modules)

    print("Using cached node modules from %s" % (cached_node_modules,))
    cmds = [
        ['rm', '-rf', 'node_modules'],
        ["ln", "-nsf", cached_node_modules, 'node_modules'],
    ]
    for cmd in cmds:
        run(cmd, stdout=stdout, stderr=stderr)
Example #5
def do_yarn_install(target_path, yarn_args, success_stamp, stdout=None, stderr=None,
                    copy_modules=False):
    # type: (str, List[str], str, Optional[IO[Any]], Optional[IO[Any]], bool) -> None
    cmds = [
        ['mkdir', '-p', target_path],
        ['cp', 'package.json', "yarn.lock", target_path],
    ]
    cached_node_modules = os.path.join(target_path, 'node_modules')
    if copy_modules:
        print("Cached version not found! Copying node modules.")
        cmds.append(["cp", "-rT", "prod-static/serve/node_modules", cached_node_modules])
    else:
        print("Cached version not found! Installing node modules.")

        # Copy the existing node_modules to speed up install
        if os.path.exists("node_modules"):
            cmds.append(["cp", "-R", "node_modules/", cached_node_modules])
        cd_exec = os.path.join(ZULIP_PATH, "scripts/lib/cd_exec")
        if os.environ.get('CUSTOM_CA_CERTIFICATES'):
            cmds.append([YARN_BIN, "config", "set", "cafile", os.environ['CUSTOM_CA_CERTIFICATES']])
        cmds.append([cd_exec, target_path, YARN_BIN, "install", "--non-interactive"] +
                    yarn_args)
    cmds.append(['touch', success_stamp])

    for cmd in cmds:
        run(cmd, stdout=stdout, stderr=stderr)
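Note: Examples #4 and #5 gate the cached install on a .success-stamp file; because the touch runs last, a failed install leaves no stamp and the work is redone on the next run. The idiom in isolation (names assumed):

import os
from typing import Callable

def ensure_built(stamp: str, build: Callable[[], None]) -> None:
    if os.path.exists(stamp):
        return  # a previous run completed successfully
    build()  # may raise, in which case the stamp is never written
    open(stamp, "w").close()  # record success only after the work is done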
Example #6
def do_setup_virtualenv(venv_path, requirements_file, virtualenv_args):
    # type: (str, str, List[str]) -> None

    # Setup Python virtualenv
    new_packages = set(get_package_names(requirements_file))

    run_as_root(["rm", "-rf", venv_path])
    if not try_to_copy_venv(venv_path, new_packages):
        # Create new virtualenv.
        run_as_root(["mkdir", "-p", venv_path])
        run_as_root(["virtualenv"] + virtualenv_args + [venv_path])
        run_as_root(["chown", "-R",
                     "{}:{}".format(os.getuid(), os.getgid()), venv_path])
        create_log_entry(get_logfile_name(venv_path), "", set(), new_packages)

    create_requirements_index_file(venv_path, requirements_file)

    pip = os.path.join(venv_path, "bin", "pip")

    # use custom certificate if needed
    if os.environ.get('CUSTOM_CA_CERTIFICATES'):
        print("Configuring pip to use custom CA certificates...")
        add_cert_to_pipconf()

    # CentOS-specific hack/workaround
    # Install pycurl with custom flag due to this error when installing
    # via pip:
    # __main__.ConfigurationError: Curl is configured to use SSL, but
    # we have not been able to determine which SSL backend it is using.
    # Please see PycURL documentation for how to specify the SSL
    # backend manually.
    # See https://github.com/pycurl/pycurl/issues/526
    # The fix exists on pycurl master, but not yet in any release
    # We can likely remove this when pycurl > 7.43.0.2 comes out.
    if os.path.exists("/etc/redhat-release"):
        pycurl_env = os.environ.copy()
        pycurl_env["PYCURL_SSL_LIBRARY"] = "nss"
        run([pip, "install", "pycurl==7.43.0.2", "--compile", "--no-cache-dir"],
            env=pycurl_env)

    try:
        install_venv_deps(pip, requirements_file)
    except subprocess.CalledProcessError:
        # Might be a failure due to network connection issues. Retrying...
        print(WARNING + "`pip install` failed; retrying..." + ENDC)
        install_venv_deps(pip, requirements_file)

    # The typing module has been included in stdlib since 3.5.
    # Installing a PyPI version of it was harmless until Python 3.7,
    # where it triggers "AttributeError: type object 'Callable' has
    # no attribute '_abc_registry'". So, to be safe, we uninstall it
    # for all Python versions >= 3.5.
    # Remove this once 3.4 is no longer supported.
    at_least_35 = (sys.version_info.major == 3) and (sys.version_info.minor >= 5)
    if at_least_35 and ('python2.7' not in virtualenv_args):
        run([pip, "uninstall", "-y", "typing"])

    run_as_root(["chmod", "-R", "a+rX", venv_path])
Example #7
def do_npm_install(target_path, npm_args, stdout=None, stderr=None, copy_modules=False, success_stamp=None):
    # type: (str, List[str], Optional[IO], Optional[IO], Optional[bool], Optional[str]) -> None
    cmds = [["rm", "-rf", target_path], ["mkdir", "-p", target_path], ["cp", "package.json", target_path]]
    if copy_modules:
        print("Cached version not found! Copying node modules.")
        cmds.append(["cp", "-rT", "prod-static/serve/node_modules", os.path.join(target_path, "node_modules")])
    else:
        print("Cached version not found! Installing node modules.")
        cmds.append(["npm", "install"] + npm_args + ["--prefix", target_path])
    cmds.append(["touch", success_stamp])

    for cmd in cmds:
        run(cmd, stdout=stdout, stderr=stderr)
Example #8
def main(is_travis=False):
    # type: (bool) -> None
    # Get the correct Python interpreter. If we don't do this and use
    # `virtualenv -p python3` to create the venv in Travis, the venv
    # starts referring to the system Python interpreter.
    python_interpreter = subprocess_text_output(['which', 'python3'])
    if is_travis:
        setup_virtualenv(VENV_PATH, DEV_REQS_FILE, patch_activate_script=True,
                         virtualenv_args=['-p', python_interpreter])
    else:
        run(['sudo', 'rm', '-f', OLD_VENV_PATH])
        setup_virtualenv(VENV_PATH, DEV_REQS_FILE, patch_activate_script=True,
                         virtualenv_args=['-p', python_interpreter])
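Note: Example #8 shells out to `which python3` to pin the interpreter; the standard library offers shutil.which as an equivalent (a sketch, not the upstream code):

import shutil

python_interpreter = shutil.which("python3")
assert python_interpreter is not None, "python3 not found on PATH"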
Example #9
    def handle(self, *args: Any, **options: Any) -> None:
        timestamp = timezone_now().strftime(TIMESTAMP_FORMAT)

        with tempfile.TemporaryDirectory(
            prefix="zulip-backup-%s-" % (timestamp,)
        ) as tmp:
            os.mkdir(os.path.join(tmp, "zulip-backup"))
            members = []

            if settings.DEVELOPMENT:
                os.symlink(
                    os.path.join(settings.DEPLOY_ROOT, "zproject"),
                    os.path.join(tmp, "zulip-backup", "zproject"),
                )
                members.append("zulip-backup/zproject/dev-secrets.conf")
            else:
                os.symlink("/etc/zulip", os.path.join(tmp, "zulip-backup", "settings"))
                members.append("zulip-backup/settings")

            db_name = settings.DATABASES["default"]["NAME"]
            db_dir = os.path.join(tmp, "zulip-backup", "database")
            run(["pg_dump", "--format=directory", db_name, "--file", db_dir])
            members.append("zulip-backup/database")

            if settings.LOCAL_UPLOADS_DIR is not None and os.path.exists(
                os.path.join(settings.DEPLOY_ROOT, settings.LOCAL_UPLOADS_DIR)
            ):
                os.symlink(
                    os.path.join(settings.DEPLOY_ROOT, settings.LOCAL_UPLOADS_DIR),
                    os.path.join(tmp, "zulip-backup", "uploads"),
                )
                members.append("zulip-backup/uploads")

            try:
                if options["output"] is None:
                    tarball_path = tempfile.NamedTemporaryFile(
                        prefix="zulip-backup-%s-" % (timestamp,),
                        suffix=".tar.gz",
                        delete=False,
                    ).name
                else:
                    tarball_path = options["output"]

                run(["tar", "-C", tmp, "-chzf", tarball_path, "--"] + members)
                print("Backup tarball written to %s" % (tarball_path,))
            except BaseException:
                if options["output"] is None:
                    os.unlink(tarball_path)
                raise
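Note: Example #9 stages symlinks into the temporary tree and relies on the h in tar's -chzf flags to dereference them, so the archive contains the link targets' contents rather than the links. A self-contained illustration (assumes a tar binary on PATH):

import os
import subprocess
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    with open(os.path.join(tmp, "real.txt"), "w") as f:
        f.write("data")
    os.symlink(os.path.join(tmp, "real.txt"), os.path.join(tmp, "link.txt"))
    # Without -h the tarball would hold just the symlink; with -h it
    # holds the contents of the file the link points at.
    subprocess.check_call(
        ["tar", "-C", tmp, "-chzf", os.path.join(tmp, "out.tar.gz"), "link.txt"])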
Example #10
def run_db_migrations(platform: str) -> None:
    if platform == 'dev':
        migration_status_file = 'migration_status_dev'
        settings = 'zproject.settings'
    elif platform == 'test':
        migration_status_file = 'migration_status_test'
        settings = 'zproject.test_settings'

    # We shell out to `manage.py` and pass `DJANGO_SETTINGS_MODULE` on
    # the command line rather than just calling the migration
    # functions, because Django doesn't support changing settings like
    # what the database is at runtime.
    run(['env', ('DJANGO_SETTINGS_MODULE=%s' % settings), './manage.py',
         'migrate', '--no-input'])
    run(['env', ('DJANGO_SETTINGS_MODULE=%s' % settings), './manage.py',
         'get_migration_status', '--output=%s' % (migration_status_file)])
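Note: Example #10 prefixes the command with `env DJANGO_SETTINGS_MODULE=...` to set the variable for the child process only. subprocess can express the same thing via its env= parameter (a sketch; actually running it requires a Django checkout with manage.py):

import os
import subprocess

env = dict(os.environ, DJANGO_SETTINGS_MODULE="zproject.settings")
subprocess.check_call(["./manage.py", "migrate", "--no-input"], env=env)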
Example #11
def do_npm_install(target_path, npm_args, stdout=None, stderr=None, copy_modules=False):
    # type: (str, List[str], Optional[str], Optional[str], Optional[bool]) -> None
    cmds = [
        ["sudo", "rm", "-rf", target_path],
        ['sudo', 'mkdir', '-p', target_path],
        ["sudo", "chown", "{}:{}".format(os.getuid(), os.getgid()), target_path],
        ['cp', 'package.json', target_path],
    ]
    if copy_modules:
        print("Cached version not found! Copying node modules.")
        cmds.append(["mv", "node_modules", target_path])
    else:
        print("Cached version not found! Installing node modules.")
        cmds.append(['npm', 'install'] + npm_args + ['--prefix', target_path])

    for cmd in cmds:
        run(cmd, stdout=stdout, stderr=stderr)
Example #12
def do_npm_install(target_path, npm_args, stdout=None, stderr=None, copy_modules=False,
                   success_stamp=None):
    # type: (str, List[str], Optional[IO], Optional[IO], Optional[bool], Optional[str]) -> None
    cmds = [
        ["rm", "-rf", target_path],
        ['mkdir', '-p', target_path],
        ['cp', 'package.json', target_path],
    ]
    if copy_modules:
        print("Cached version not found! Copying node modules.")
        cmds.append(["mv", "node_modules", target_path])
    else:
        print("Cached version not found! Installing node modules.")
        cmds.append(['npm', 'install'] + npm_args + ['--prefix', target_path])
    cmds.append(['touch', success_stamp])

    for cmd in cmds:
        run(cmd, stdout=stdout, stderr=stderr)
Example #13
def run_db_migrations(platform: str) -> None:
    if platform == 'dev':
        migration_status_file = 'migration_status_dev'
        settings = 'zproject.settings'
        db_name = 'ZULIP_DB_NAME=zulip'
    elif platform == 'test':
        migration_status_file = 'migration_status_test'
        settings = 'zproject.test_settings'
        db_name = 'ZULIP_DB_NAME=zulip_test_template'

    # We shell out to `manage.py` and pass `DJANGO_SETTINGS_MODULE` on
    # the command line rather than just calling the migration
    # functions, because Django doesn't support changing settings like
    # what the database is at runtime.
    # We also export ZULIP_DB_NAME, which the dev platform ignores but
    # the test platform recognises and uses to migrate the correct db.
    run(['env', ('DJANGO_SETTINGS_MODULE=%s' % settings), db_name,
         './manage.py', 'migrate', '--no-input'])
    run(['env', ('DJANGO_SETTINGS_MODULE=%s' % settings), db_name,
         './manage.py', 'get_migration_status',
         '--output=%s' % (migration_status_file)])
Example #14
def setup_virtualenv(target_venv_path, requirements_file, virtualenv_args=None, patch_activate_script=False):
    # type: (Optional[str], str, Optional[List[str]], bool) -> str

    # Check if a cached version already exists
    path = os.path.join(ZULIP_PATH, 'scripts', 'lib', 'hash_reqs.py')
    output = subprocess.check_output([path, requirements_file], universal_newlines=True)
    sha1sum = output.split()[0]
    if target_venv_path is None:
        cached_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, 'venv')
    else:
        cached_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, os.path.basename(target_venv_path))
    success_stamp = os.path.join(cached_venv_path, "success-stamp")
    if not os.path.exists(success_stamp):
        do_setup_virtualenv(cached_venv_path, requirements_file, virtualenv_args or [])
        open(success_stamp, 'w').close()

    print("Using cached Python venv from %s" % (cached_venv_path,))
    if target_venv_path is not None:
        run(["sudo", "ln", "-nsf", cached_venv_path, target_venv_path])
        if patch_activate_script:
            do_patch_activate_script(target_venv_path)
    return cached_venv_path
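Note: the hash_reqs.py step above turns the requirements file into a digest used as the venv cache directory name. A rough sketch of that idea (the real script also expands nested `-r` includes, which this does not):

import hashlib

def hash_requirements(requirements_file: str) -> str:
    with open(requirements_file, "rb") as f:
        return hashlib.sha1(f.read()).hexdigest()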
Example #15
def setup_node_modules(npm_args=None, stdout=None, stderr=None, copy_modules=False):
    # type: (Optional[List[str]], Optional[IO], Optional[IO], Optional[bool]) -> None
    sha1sum = generate_sha1sum_node_modules(npm_args)
    npm_cache = os.path.join(NPM_CACHE_PATH, sha1sum)
    cached_node_modules = os.path.join(npm_cache, 'node_modules')
    success_stamp = os.path.join(cached_node_modules, '.success-stamp')
    # Check if a cached version already exists
    if not os.path.exists(success_stamp):
        do_npm_install(npm_cache,
                       npm_args or [],
                       success_stamp,
                       stdout=stdout,
                       stderr=stderr,
                       copy_modules=copy_modules)

    print("Using cached node modules from %s" % (cached_node_modules,))
    cmds = [
        ['rm', '-rf', 'node_modules'],
        ["ln", "-nsf", cached_node_modules, 'node_modules'],
    ]
    for cmd in cmds:
        run(cmd, stdout=stdout, stderr=stderr)
Example #16
def install_npm():
    # type: () -> None
    if not TRAVIS:
        if subprocess_text_output(['npm', '--version']) != NPM_VERSION:
            run(["sudo", "npm", "install", "-g", "npm@{}".format(NPM_VERSION)])

        return

    run(['mkdir', '-p', TRAVIS_NODE_PATH])

    npm_exe = os.path.join(TRAVIS_NODE_PATH, 'bin', 'npm')
    travis_npm = subprocess_text_output(['which', 'npm'])
    if os.path.exists(npm_exe):
        run(['sudo', 'ln', '-sf', npm_exe, travis_npm])

    version = subprocess_text_output(['npm', '--version'])
    if os.path.exists(npm_exe) and version == NPM_VERSION:
        print("Using cached npm")
        return

    run(["npm", "install", "-g", "--prefix", TRAVIS_NODE_PATH, "npm@{}".format(NPM_VERSION)])
    run(['sudo', 'ln', '-sf', npm_exe, travis_npm])
Example #17
def setup_node_modules(npm_args=None, stdout=None, stderr=None, copy_modules=False):
    # type: (Optional[List[str]], Optional[str], Optional[str], Optional[bool]) -> None
    sha1sum = hashlib.sha1()
    sha1sum.update(subprocess_text_output(['cat', 'package.json']).encode('utf8'))
    sha1sum.update(subprocess_text_output(['npm', '--version']).encode('utf8'))
    sha1sum.update(subprocess_text_output(['node', '--version']).encode('utf8'))
    if npm_args is not None:
        sha1sum.update(''.join(sorted(npm_args)).encode('utf8'))

    npm_cache = os.path.join(NPM_CACHE_PATH, sha1sum.hexdigest())
    cached_node_modules = os.path.join(npm_cache, 'node_modules')
    success_stamp = os.path.join(cached_node_modules, '.success-stamp')
    # Check if a cached version already exists
    if not os.path.exists(success_stamp):
        do_npm_install(npm_cache, npm_args or [], stdout, stderr, copy_modules)

    print("Using cached node modules from %s" % (cached_node_modules,))
    cmds = [
        ['rm', '-rf', 'node_modules'],
        ["ln", "-nsf", cached_node_modules, 'node_modules'],
        ['touch', success_stamp],
    ]
    for cmd in cmds:
        run(cmd, stdout=stdout, stderr=stderr)
Example #18
def setup_virtualenv(target_venv_path, requirements_file, virtualenv_args=None, patch_activate_script=False):
    # type: (Optional[str], str, Optional[List[str]], bool) -> str

    # Check if a cached version already exists
    path = os.path.join(ZULIP_PATH, 'scripts', 'lib', 'hash_reqs.py')
    output = subprocess.check_output([path, requirements_file], universal_newlines=True)
    sha1sum = output.split()[0]
    if target_venv_path is None:
        cached_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, 'venv')
    else:
        cached_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, os.path.basename(target_venv_path))
    success_stamp = os.path.join(cached_venv_path, "success-stamp")
    if not os.path.exists(success_stamp):
        do_setup_virtualenv(cached_venv_path, requirements_file, virtualenv_args or [])
        run(["touch", success_stamp])

    print("Using cached Python venv from %s" % (cached_venv_path,))
    if target_venv_path is not None:
        run(["sudo", "ln", "-nsf", cached_venv_path, target_venv_path])
        if patch_activate_script:
            do_patch_activate_script(target_venv_path)
    activate_this = os.path.join(cached_venv_path, "bin", "activate_this.py")
    exec(open(activate_this).read(), {}, dict(__file__=activate_this)) # type: ignore # https://github.com/python/mypy/issues/1577
    return cached_venv_path
Example #19
def install_apt_deps(deps_to_install, retry=False):
    # type: (List[str], bool) -> None
    if retry:
        print(WARNING + "`apt-get -y install` failed while installing dependencies; retrying..." + ENDC)
        # Since a common failure mode is for the caching in
        # `setup-apt-repo` to optimize the fast code path to skip
        # running `apt-get update` when the target apt repository
        # is out of date, we run it explicitly here so that we
        # recover automatically.
        run(['sudo', 'apt-get', 'update'])

    # setup-apt-repo does an `apt-get update`
    run(["sudo", "./scripts/lib/setup-apt-repo"])
    run(["sudo", "apt-get", "-y", "install", "--no-install-recommends"] + deps_to_install)
Example #20
def copy_parent_log(source_log, target_log):
    # type: (str, str) -> None
    if os.path.exists(source_log):
        run('cp {} {}'.format(source_log, target_log).split())
Example #21
def main(options: argparse.Namespace) -> int:
    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    # This needs to happen before anything that imports zproject.settings.
    run(["scripts/setup/generate_secrets.py", "--development"])

    # create log directory `zulip/var/log`
    os.makedirs(LOG_DIR_PATH, exist_ok=True)
    # create upload directory `var/uploads`
    os.makedirs(UPLOAD_DIR_PATH, exist_ok=True)
    # create test upload directory `var/test_upload`
    os.makedirs(TEST_UPLOAD_DIR_PATH, exist_ok=True)
    # create coverage directory `var/coverage`
    os.makedirs(COVERAGE_DIR_PATH, exist_ok=True)
    # create linecoverage directory `var/node-coverage`
    os.makedirs(NODE_TEST_COVERAGE_DIR_PATH, exist_ok=True)
    # create XUnit XML test results directory `var/xunit-test-results`
    os.makedirs(XUNIT_XML_TEST_RESULTS_DIR_PATH, exist_ok=True)

    # The `build_emoji` script requires `emoji-datasource` package
    # which we install via npm; thus this step is after installing npm
    # packages.
    if not os.access(EMOJI_CACHE_PATH, os.W_OK):
        run_as_root(["mkdir", "-p", EMOJI_CACHE_PATH])
        run_as_root(
            ["chown",
             "%s:%s" % (os.getuid(), os.getgid()), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    generate_zulip_bots_static_files()

    build_pygments_data_paths = [
        "tools/setup/build_pygments_data", "tools/setup/lang.json"
    ]
    from pygments import __version__ as pygments_version
    if file_or_package_hash_updated(build_pygments_data_paths,
                                    "build_pygments_data_hash",
                                    options.is_force, [pygments_version]):
        run(["tools/setup/build_pygments_data"])
    else:
        print("No need to run `tools/setup/build_pygments_data`.")

    update_authors_json_paths = [
        "tools/update-authors-json", "zerver/tests/fixtures/authors.json"
    ]
    if file_or_package_hash_updated(update_authors_json_paths,
                                    "update_authors_json_hash",
                                    options.is_force):
        run(["tools/update-authors-json", "--use-fixture"])
    else:
        print("No need to run `tools/update-authors-json`.")

    email_source_paths = [
        "tools/inline-email-css", "templates/zerver/emails/email.css"
    ]
    email_source_paths += glob.glob('templates/zerver/emails/*.source.html')
    if file_or_package_hash_updated(email_source_paths,
                                    "last_email_source_files_hash",
                                    options.is_force):
        run(["tools/inline-email-css"])
    else:
        print("No need to run `tools/inline-email-css`.")

    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using template_database_status
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import template_database_status, run_db_migrations, \
            destroy_leaked_test_databases

        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        migration_status_path = os.path.join(UUID_VAR_PATH,
                                             "migration_status_dev")
        dev_template_db_status = template_database_status(
            migration_status=migration_status_path,
            settings="zproject.settings",
            database_name="zulip",
        )
        if options.is_force or dev_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        elif dev_template_db_status == 'run_migrations':
            run_db_migrations('dev')
        elif dev_template_db_status == 'current':
            print("No need to regenerate the dev DB.")

        test_template_db_status = template_database_status()
        if options.is_force or test_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        elif test_template_db_status == 'run_migrations':
            run_db_migrations('test')
        elif test_template_db_status == 'current':
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('locale/*/translations.json')

        if file_or_package_hash_updated(paths, "last_compilemessages_hash",
                                        options.is_force):
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

        destroyed = destroy_leaked_test_databases()
        if destroyed:
            print("Dropped %s stale test databases!" % (destroyed, ))

    run(["scripts/lib/clean-unused-caches", "--threshold=6"])

    # Keeping this cache file around can cause eslint to throw
    # random TypeErrors when new/updated dependencies are added
    if os.path.isfile('.eslintcache'):
        # Remove this block when
        # https://github.com/eslint/eslint/issues/11639 is fixed
        # upstream.
        os.remove('.eslintcache')

    # Clean up the root of the `var/` directory for various
    # testing-related files that we have migrated to
    # `var/<uuid>/test-backend`.
    print("Cleaning var/ directory files...")
    var_paths = glob.glob('var/test*')
    var_paths.append('var/bot_avatar')
    for path in var_paths:
        try:
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
        except FileNotFoundError:
            pass

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file, ))
    open(version_file, 'w').write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
Example #22
def do_setup_virtualenv(venv_path, requirements_file, virtualenv_args):
    # type: (str, str, List[str]) -> None

    # Setup Python virtualenv
    new_packages = set(get_package_names(requirements_file))

    run(["sudo", "rm", "-rf", venv_path])
    if not try_to_copy_venv(venv_path, new_packages):
        # Create new virtualenv.
        run(["sudo", "mkdir", "-p", venv_path])
        run(["sudo", "virtualenv"] + virtualenv_args + [venv_path])
        run(["sudo", "chown", "-R",
             "{}:{}".format(os.getuid(), os.getgid()), venv_path])
        create_log_entry(get_logfile_name(venv_path), "", set(), new_packages)

    create_requirements_index_file(venv_path, requirements_file)
    # Switch current Python context to the virtualenv.
    activate_this = os.path.join(venv_path, "bin", "activate_this.py")
    exec(open(activate_this).read(), {}, dict(__file__=activate_this)) # type: ignore # https://github.com/python/mypy/issues/1577

    run(["pip", "install", "-U", "setuptools"])
    run(["pip", "install", "--upgrade", "pip", "wheel"])
    run(["pip", "install", "--no-deps", "--requirement", requirements_file])
    run(["sudo", "chmod", "-R", "a+rX", venv_path])
Example #23
def main(options):
    # type: (Any) -> int

    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # setup-apt-repo does an `apt-get update`
    # hash the apt dependencies
    sha_sum = hashlib.sha1()

    for apt_dependency in APT_DEPENDENCIES[codename]:
        sha_sum.update(apt_dependency.encode('utf8'))
    # hash the content of setup-apt-repo
    sha_sum.update(open('scripts/lib/setup-apt-repo', 'rb').read())

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
    try:
        hash_file = open(apt_hash_file_path, 'r')
        last_apt_dependencies_hash = hash_file.read()
    except IOError:
        run(['touch', apt_hash_file_path])

    if (new_apt_dependencies_hash != last_apt_dependencies_hash):
        try:
            install_apt_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            print(WARNING + "`apt-get -y install` failed while installing dependencies; retrying..." + ENDC)
            # Since a common failure mode is for the caching in
            # `setup-apt-repo` to optimize the fast code path to skip
            # running `apt-get update` when the target apt repository
            # is out of date, we run it explicitly here so that we
            # recover automatically.
            run(['sudo', 'apt-get', 'update'])
            install_apt_deps()
        hash_file = open(apt_hash_file_path, 'w')
        hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
    run(["sudo", "-H", "scripts/lib/install-node"])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        # Hack: We remove `node_modules` as root to work around an
        # issue with the symlinks being improperly owned by root.
        if os.path.islink("node_modules"):
            run(["sudo", "rm", "-f", "node_modules"])
        run(["sudo", "mkdir", "-p", NODE_MODULES_CACHE_PATH])
        run(["sudo", "chown", "%s:%s" % (user_id, user_id), NODE_MODULES_CACHE_PATH])
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        setup_node_modules()

    # Import tools/setup_venv.py instead of running it so that we get an
    # activated virtualenv for the rest of the provisioning process.
    from tools.setup import setup_venvs
    setup_venvs.main()

    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    run(["sudo", "cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # create log directory `zulip/var/log`
    run(["mkdir", "-p", LOG_DIR_PATH])
    # create upload directory `var/uploads`
    run(["mkdir", "-p", UPLOAD_DIR_PATH])
    # create test upload directory `var/test_upload`
    run(["mkdir", "-p", TEST_UPLOAD_DIR_PATH])
    # create coverage directory `var/coverage`
    run(["mkdir", "-p", COVERAGE_DIR_PATH])
    # create linecoverage directory `var/linecoverage-report`
    run(["mkdir", "-p", LINECOVERAGE_DIR_PATH])
    # create node coverage directory `var/node-coverage`
    run(["mkdir", "-p", NODE_TEST_COVERAGE_DIR_PATH])

    # `build_emoji` script requires `emoji-datasource` package which we install
    # via npm and hence it should be executed after we are done installing npm
    # packages.
    if not os.path.isdir(EMOJI_CACHE_PATH):
        run(["sudo", "mkdir", EMOJI_CACHE_PATH])
    run(["sudo", "chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    run(["tools/setup/generate_zulip_bots_static_files"])

    run(["tools/generate-custom-icon-webfont"])
    run(["tools/setup/build_pygments_data"])
    run(["scripts/setup/generate_secrets.py", "--development"])
    run(["tools/update-authors-json", "--use-fixture"])
    run(["tools/inline-email-css"])
    if is_circleci or (is_travis and not options.is_production_travis):
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
        run(["sudo", "service", "postgresql", "restart"])
    elif options.is_docker:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run(["sudo", "pg_createcluster", "-e", "utf8", "--start", POSTGRES_VERSION, "main"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using is_template_database_current
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import is_template_database_current

        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        migration_status_path = os.path.join(UUID_VAR_PATH, "migration_status_dev")
        if options.is_force or not is_template_database_current(
                migration_status=migration_status_path,
                settings="zproject.settings",
                database_name="zulip",
        ):
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        else:
            print("No need to regenerate the dev DB.")

        if options.is_force or not is_template_database_current():
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        else:
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.
        sha1sum = hashlib.sha1()
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('static/locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('static/locale/*/translations.json')

        for path in paths:
            with open(path, 'rb') as file_to_hash:
                sha1sum.update(file_to_hash.read())

        compilemessages_hash_path = os.path.join(UUID_VAR_PATH, "last_compilemessages_hash")
        new_compilemessages_hash = sha1sum.hexdigest()
        run(['touch', compilemessages_hash_path])
        with open(compilemessages_hash_path, 'r') as hash_file:
            last_compilemessages_hash = hash_file.read()

        if options.is_force or (new_compilemessages_hash != last_compilemessages_hash):
            with open(compilemessages_hash_path, 'w') as hash_file:
                hash_file.write(new_compilemessages_hash)
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

        run(["./manage.py", "create_realm_internal_bots"])  # Creates realm internal bots if required.

    run(["scripts/lib/clean-unused-caches"])

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file,))
    open(version_file, 'w').write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
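Note: Example #23 skips the apt work entirely when a stored digest of the dependency list and of scripts/lib/setup-apt-repo is unchanged. The read-compare-write idiom, condensed (a sketch with assumed names):

import os

def needs_rerun(hash_path: str, current_digest: str) -> bool:
    if os.path.exists(hash_path):
        with open(hash_path) as f:
            if f.read() == current_digest:
                return False  # nothing changed since the last run
    return True

def record_digest(hash_path: str, current_digest: str) -> None:
    with open(hash_path, "w") as f:
        f.write(current_digest)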
Example #24
def try_to_copy_venv(venv_path, new_packages):
    # type: (str, Set[str]) -> bool
    """
    Tries to copy packages from an old virtual environment in the cache
    to the new virtual environment. The algorithm works as follows:
        1. Find a virtual environment, v, from the cache that has the
        highest overlap with the new requirements such that:
            a. The new requirements only add to the packages of v.
            b. The new requirements only upgrade packages of v.
        2. Copy the contents of v to the new virtual environment using
        virtualenv-clone.
        3. Delete all .pyc files in the new virtual environment.
    """
    if not os.path.exists(VENV_CACHE_PATH):
        return False

    venv_name = os.path.basename(venv_path)

    overlaps = []  # type: List[Tuple[int, str, Set[str]]]
    old_packages = set()  # type: Set[str]
    for sha1sum in os.listdir(VENV_CACHE_PATH):
        curr_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, venv_name)
        if (curr_venv_path == venv_path or
                not os.path.exists(get_index_filename(curr_venv_path))):
            continue

        old_packages = get_venv_packages(curr_venv_path)
        # We only consider using old virtualenvs that contain only
        # packages that we want in our new virtualenv.
        if not (old_packages - new_packages):
            overlap = new_packages & old_packages
            overlaps.append((len(overlap), curr_venv_path, overlap))

    target_log = get_logfile_name(venv_path)
    source_venv_path = None
    if overlaps:
        # Here, we select the old virtualenv with the largest overlap
        overlaps = sorted(overlaps)
        _, source_venv_path, copied_packages = overlaps[-1]
        print('Copying packages from {}'.format(source_venv_path))
        clone_ve = "{}/bin/virtualenv-clone".format(source_venv_path)
        cmd = "sudo {exe} {source} {target}".format(exe=clone_ve,
                                                    source=source_venv_path,
                                                    target=venv_path).split()
        try:
            # TODO: We can probably remove this in a few months, now
            # that we can expect that virtualenv-clone is present in
            # all of our recent virtualenvs.
            run(cmd)
        except Exception:
            # Virtualenv-clone is not installed. Install it and try running
            # the command again.
            try:
                run("{}/bin/pip install --no-deps virtualenv-clone".format(
                    source_venv_path).split())
                run(cmd)
            except Exception:
                # virtualenv-clone isn't working, so just make a new venv
                return False

        run(["sudo", "chown", "-R",
             "{}:{}".format(os.getuid(), os.getgid()), venv_path])
        source_log = get_logfile_name(source_venv_path)
        copy_parent_log(source_log, target_log)
        create_log_entry(target_log, source_venv_path, copied_packages,
                         new_packages - copied_packages)
        return True

    return False
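Note: try_to_copy_venv sorts the overlap tuples and takes the last, i.e. the cached venv sharing the most packages with the new requirements. max() with a key expresses the same selection (the data here is illustrative):

from typing import List, Set, Tuple

overlaps: List[Tuple[int, str, Set[str]]] = [
    (3, "/cache/aaa/venv", {"six", "pip", "wheel"}),
    (5, "/cache/bbb/venv", {"six", "pip", "wheel", "pytz", "ujson"}),
]
_, source_venv_path, copied_packages = max(overlaps, key=lambda t: t[0])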
Example #25
def add_cert_to_pipconf():
    # type: () -> None
    conffile = os.path.expanduser("~/.pip/pip.conf")
    confdir = os.path.expanduser("~/.pip/")
    os.makedirs(confdir, exist_ok=True)
    run(["crudini", "--set", conffile, "global", "cert", os.environ["CUSTOM_CA_CERTIFICATES"]])
Example #26
def install_apt_deps():
    # type: () -> None
    # setup-apt-repo does an `apt-get update`
    run(["sudo", "./scripts/lib/setup-apt-repo"])
    run(["sudo", "apt-get", "-y", "install", "--no-install-recommends"] +
        APT_DEPENDENCIES[codename])
Example #27
    def handle(self, *args: Any, **options: Any) -> None:
        timestamp = timezone_now().strftime(TIMESTAMP_FORMAT)
        with tempfile.TemporaryDirectory(
                prefix=f"zulip-backup-{timestamp}-", ) as tmp:
            os.mkdir(os.path.join(tmp, "zulip-backup"))
            members = []
            paths = []

            with open(os.path.join(tmp, "zulip-backup", "zulip-version"),
                      "w") as f:
                print(ZULIP_VERSION, file=f)
                git = try_git_describe()
                if git:
                    print(git, file=f)
            members.append("zulip-backup/zulip-version")

            with open(os.path.join(tmp, "zulip-backup", "os-version"),
                      "w") as f:
                print(
                    "{ID} {VERSION_ID}".format(**parse_os_release()),
                    file=f,
                )
            members.append("zulip-backup/os-version")

            with open(os.path.join(tmp, "zulip-backup", "postgres-version"),
                      "w") as f:
                print(connection.pg_version, file=f)
            members.append("zulip-backup/postgres-version")

            if settings.DEVELOPMENT:
                members.append(
                    os.path.join(settings.DEPLOY_ROOT, "zproject",
                                 "dev-secrets.conf"), )
                paths.append(
                    ("zproject", os.path.join(settings.DEPLOY_ROOT,
                                              "zproject")), )
            else:
                members.append("/etc/zulip")
                paths.append(("settings", "/etc/zulip"))

            if not options['skip_db']:
                pg_dump_command = [
                    "pg_dump",
                    "--format=directory",
                    "--file",
                    os.path.join(tmp, "zulip-backup", "database"),
                    "--host",
                    settings.DATABASES["default"]["HOST"],
                    "--port",
                    settings.DATABASES["default"]["PORT"],
                    "--username",
                    settings.DATABASES["default"]["USER"],
                    "--dbname",
                    settings.DATABASES["default"]["NAME"],
                    "--no-password",
                ]
                os.environ["PGPASSWORD"] = settings.DATABASES["default"][
                    "PASSWORD"]

                run(
                    pg_dump_command,
                    cwd=tmp,
                )
                members.append("zulip-backup/database")

            if not options[
                    'skip_uploads'] and settings.LOCAL_UPLOADS_DIR is not None and os.path.exists(
                        os.path.join(settings.DEPLOY_ROOT,
                                     settings.LOCAL_UPLOADS_DIR), ):
                members.append(
                    os.path.join(settings.DEPLOY_ROOT,
                                 settings.LOCAL_UPLOADS_DIR), )
                paths.append((
                    "uploads",
                    os.path.join(settings.DEPLOY_ROOT,
                                 settings.LOCAL_UPLOADS_DIR),
                ), )

            assert not any("|" in name or "|" in path for name, path in paths)
            transform_args = [
                r"--transform=s|^{}(/.*)?$|zulip-backup/{}\1|x".format(
                    re.escape(path),
                    name.replace("\\", r"\\"),
                ) for name, path in paths
            ]

            try:
                if options["output"] is None:
                    tarball_path = tempfile.NamedTemporaryFile(
                        prefix=f"zulip-backup-{timestamp}-",
                        suffix=".tar.gz",
                        delete=False,
                    ).name
                else:
                    tarball_path = options["output"]

                run([
                    "tar", "-C", tmp, "-cPzf", tarball_path, *transform_args,
                    "--", *members
                ])
                print(f"Backup tarball written to {tarball_path}")
            except BaseException:
                if options["output"] is None:
                    os.unlink(tarball_path)
                raise
Example #28
def copy_parent_log(source_log, target_log):
    # type: (str, str) -> None
    if os.path.exists(source_log):
        run('cp {} {}'.format(source_log, target_log).split())
Example #29
# Check the RAM on the user's system, and throw an error if <1.5GB.
# This avoids users getting segfaults running `pip install` that are
# generally more annoying to debug.
with open("/proc/meminfo") as meminfo:
    ram_size = meminfo.readlines()[0].strip().split(" ")[-2]
ram_gb = float(ram_size) / 1024.0 / 1024.0
if ram_gb < 1.5:
    print("You have insufficient RAM (%s GB) to run the Zulip development environment." % (
        round(ram_gb, 2),))
    print("We recommend at least 2 GB of RAM, and require at least 1.5 GB.")
    sys.exit(1)

try:
    UUID_VAR_PATH = get_dev_uuid_var_path(create_if_missing=True)
    run(["mkdir", "-p", UUID_VAR_PATH])
    if os.path.exists(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')):
        os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
    os.symlink(
        os.path.join(ZULIP_PATH, 'README.md'),
        os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')
    )
    os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
except OSError:
    print(FAIL + "Error: Unable to create symlinks. "
          "Make sure you have permission to create symbolic links." + ENDC)
    print("See this page for more information:")
    print("  https://zulip.readthedocs.io/en/latest/development/setup-vagrant.html#os-symlink-error")
    sys.exit(1)

if platform.architecture()[0] == '64bit':
Example #30
def main(options):
    # type: (Any) -> int

    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # setup-apt-repo does an `apt-get update`
    # hash the apt dependencies
    sha_sum = hashlib.sha1()

    for apt_dependency in APT_DEPENDENCIES[codename]:
        sha_sum.update(apt_dependency.encode('utf8'))
    # hash the content of setup-apt-repo
    sha_sum.update(open('scripts/lib/setup-apt-repo', 'rb').read())

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
    try:
        hash_file = open(apt_hash_file_path, 'r')
        last_apt_dependencies_hash = hash_file.read()
    except IOError:
        run(['touch', apt_hash_file_path])

    if (new_apt_dependencies_hash != last_apt_dependencies_hash):
        try:
            install_apt_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            print(WARNING + "`apt-get -y install` failed while installing dependencies; retrying..." + ENDC)
            # Since a common failure mode is for the caching in
            # `setup-apt-repo` to optimize the fast code path to skip
            # running `apt-get update` when the target apt repository
            # is out of date, we run it explicitly here so that we
            # recover automatically.
            run(['sudo', 'apt-get', 'update'])
            install_apt_deps()
        hash_file = open(apt_hash_file_path, 'w')
        hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
    run(["sudo", "-H", "scripts/lib/install-node"])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        # Hack: We remove `node_modules` as root to work around an
        # issue with the symlinks being improperly owned by root.
        if os.path.islink("node_modules"):
            run(["sudo", "rm", "-f", "node_modules"])
        run(["sudo", "mkdir", "-p", NODE_MODULES_CACHE_PATH])
        run(["sudo", "chown", "%s:%s" % (user_id, user_id), NODE_MODULES_CACHE_PATH])
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        setup_node_modules()

    # Import tools/setup_venv.py instead of running it so that we get an
    # activated virtualenv for the rest of the provisioning process.
    from tools.setup import setup_venvs
    setup_venvs.main()

    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    run(["sudo", "cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # create log directory `zulip/var/log`
    run(["mkdir", "-p", LOG_DIR_PATH])
    # create upload directory `var/uploads`
    run(["mkdir", "-p", UPLOAD_DIR_PATH])
    # create test upload directory `var/test_upload`
    run(["mkdir", "-p", TEST_UPLOAD_DIR_PATH])
    # create coverage directory `var/coverage`
    run(["mkdir", "-p", COVERAGE_DIR_PATH])
    # create linecoverage directory `var/linecoverage-report`
    run(["mkdir", "-p", LINECOVERAGE_DIR_PATH])
    # create node coverage directory `var/node-coverage`
    run(["mkdir", "-p", NODE_TEST_COVERAGE_DIR_PATH])

    # `build_emoji` script requires `emoji-datasource` package which we install
    # via npm and hence it should be executed after we are done installing npm
    # packages.
    if not os.path.isdir(EMOJI_CACHE_PATH):
        run(["sudo", "mkdir", EMOJI_CACHE_PATH])
    run(["sudo", "chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    run(["tools/setup/generate_zulip_bots_static_files"])

    run(["tools/generate-custom-icon-webfont"])
    run(["tools/setup/build_pygments_data"])
    run(["scripts/setup/generate_secrets.py", "--development"])
    run(["tools/update-authors-json", "--use-fixture"])
    run(["tools/inline-email-css"])
    if is_circleci or (is_travis and not options.is_production_travis):
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
        run(["sudo", "service", "postgresql", "restart"])
    elif options.is_docker:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run(["sudo", "pg_createcluster", "-e", "utf8", "--start", POSTGRES_VERSION, "main"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using is_template_database_current
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import is_template_database_current

        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        migration_status_path = os.path.join(UUID_VAR_PATH, "migration_status_dev")
        if options.is_force or not is_template_database_current(
                migration_status=migration_status_path,
                settings="zproject.settings",
                database_name="zulip",
        ):
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        else:
            print("No need to regenerate the dev DB.")

        if options.is_force or not is_template_database_current():
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        else:
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.
        sha1sum = hashlib.sha1()
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('static/locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('static/locale/*/translations.json')

        for path in paths:
            with open(path, 'rb') as file_to_hash:
                sha1sum.update(file_to_hash.read())

        compilemessages_hash_path = os.path.join(UUID_VAR_PATH, "last_compilemessages_hash")
        new_compilemessages_hash = sha1sum.hexdigest()
        run(['touch', compilemessages_hash_path])
        with open(compilemessages_hash_path, 'r') as hash_file:
            last_compilemessages_hash = hash_file.read()

        if options.is_force or (new_compilemessages_hash != last_compilemessages_hash):
            with open(compilemessages_hash_path, 'w') as hash_file:
                hash_file.write(new_compilemessages_hash)
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

    run(["scripts/lib/clean-unused-caches"])

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file,))
    open(version_file, 'w').write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
Example #31
def main(options: argparse.Namespace) -> int:
    setup_bash_profile()
    setup_shell_profile("~/.zprofile")

    # This needs to happen before anything that imports zproject.settings.
    run(["scripts/setup/generate_secrets.py", "--development"])

    create_var_directories()

    # The `build_emoji` script requires `emoji-datasource` package
    # which we install via npm; thus this step is after installing npm
    # packages.
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    generate_zulip_bots_static_files()

    if options.is_force or need_to_run_build_pygments_data():
        run(["tools/setup/build_pygments_data"])
        write_new_digest(
            "build_pygments_data_hash",
            build_pygments_data_paths(),
            [pygments_version],
        )
    else:
        print("No need to run `tools/setup/build_pygments_data`.")

    if options.is_force or need_to_run_build_timezone_data():
        run(["tools/setup/build_timezone_values"])
        write_new_digest(
            "build_timezones_data_hash",
            build_timezones_data_paths(),
            [timezones_version],
        )
    else:
        print("No need to run `tools/setup/build_timezone_values`.")

    if options.is_force or need_to_run_inline_email_css():
        run(["scripts/setup/inline_email_css.py"])
        write_new_digest(
            "last_email_source_files_hash",
            inline_email_css_paths(),
        )
    else:
        print("No need to run `scripts/setup/inline_email_css.py`.")

    if not options.is_build_release_tarball_only:
        # The following block is skipped when we just need the development
        # environment to build a release tarball.

        # Need to set up Django before using template_status
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django

        django.setup()

        from django.conf import settings

        from zerver.lib.test_fixtures import (
            DEV_DATABASE,
            TEST_DATABASE,
            destroy_leaked_test_databases,
        )

        if options.is_force or need_to_run_configure_rabbitmq(
            [settings.RABBITMQ_PASSWORD]):
            run(["scripts/setup/configure-rabbitmq"])
            write_new_digest(
                "last_configure_rabbitmq_hash",
                configure_rabbitmq_paths(),
                [settings.RABBITMQ_PASSWORD],
            )
        else:
            print("No need to run `scripts/setup/configure-rabbitmq.")

        dev_template_db_status = DEV_DATABASE.template_status()
        if options.is_force or dev_template_db_status == "needs_rebuild":
            run(["tools/setup/postgresql-init-dev-db"])
            if options.skip_dev_db_build:
                # On continuous integration we only run the automated
                # tests, so we don't need to build the development
                # database used for manual testing; we can just leave
                # it as a template db and save a minute.
                #
                # Important: We don't write a digest as that would
                # incorrectly claim that we ran migrations.
                pass
            else:
                run(["tools/rebuild-dev-database"])
                DEV_DATABASE.write_new_db_digest()
        elif dev_template_db_status == "run_migrations":
            DEV_DATABASE.run_db_migrations()
        elif dev_template_db_status == "current":
            print("No need to regenerate the dev DB.")

        test_template_db_status = TEST_DATABASE.template_status()
        if options.is_force or test_template_db_status == "needs_rebuild":
            run(["tools/setup/postgresql-init-test-db"])
            run(["tools/rebuild-test-database"])
            TEST_DATABASE.write_new_db_digest()
        elif test_template_db_status == "run_migrations":
            TEST_DATABASE.run_db_migrations()
        elif test_template_db_status == "current":
            print("No need to regenerate the test DB.")

        if options.is_force or need_to_run_compilemessages():
            run(["./manage.py", "compilemessages"])
            write_new_digest(
                "last_compilemessages_hash",
                compilemessages_paths(),
            )
        else:
            print("No need to run `manage.py compilemessages`.")

        destroyed = destroy_leaked_test_databases()
        if destroyed:
            print(f"Dropped {destroyed} stale test databases!")

    clean_unused_caches.main(
        argparse.Namespace(
            threshold_days=6,
            # The defaults here should match parse_cache_script_args in zulip_tools.py
            dry_run=False,
            verbose=False,
            no_headings=True,
        ))

    # Keeping this cache file around can cause eslint to throw
    # random TypeErrors when new/updated dependencies are added
    if os.path.isfile(".eslintcache"):
        # Remove this block when
        # https://github.com/eslint/eslint/issues/11639 is fixed
        # upstream.
        os.remove(".eslintcache")

    # Clean up the root of the `var/` directory for various
    # testing-related files that we have migrated to
    # `var/<uuid>/test-backend`.
    print("Cleaning var/ directory files...")
    var_paths = glob.glob("var/test*")
    var_paths.append("var/bot_avatar")
    for path in var_paths:
        try:
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
        except FileNotFoundError:
            pass

    version_file = os.path.join(UUID_VAR_PATH, "provision_version")
    print(f"writing to {version_file}\n")
    with open(version_file, "w") as f:
        f.write(PROVISION_VERSION + "\n")

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
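One detail worth calling out in this revision: `clean_unused_caches` is invoked in-process by handing its `main` a hand-built `argparse.Namespace`, instead of shelling out to the script, which is why the comment warns that the defaults must mirror the script's own parser. A tiny sketch of the pattern; the `cleanup` module and its options are invented for illustration:

import argparse

# cleanup.py -- a script exposing main(args) as its CLI entry point.
def main(args: argparse.Namespace) -> None:
    if args.dry_run:
        print(f"Would prune caches older than {args.threshold_days} days")

# Caller: synthesize the namespace the script's parser would have built.
# Every attribute the entry point reads must be present and must match
# the parser's defaults, or behavior silently diverges from the CLI.
main(argparse.Namespace(threshold_days=6, dry_run=True))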
Beispiel #32
0
    def handle(self, *args: Any, **options: Any) -> None:
        timestamp = timezone_now().strftime(TIMESTAMP_FORMAT)
        with tempfile.TemporaryDirectory(prefix="zulip-backup-%s-" %
                                         (timestamp, )) as tmp:
            os.mkdir(os.path.join(tmp, "zulip-backup"))
            members = []
            paths = []

            with open(os.path.join(tmp, "zulip-backup", "zulip-version"),
                      "w") as f:
                print(ZULIP_VERSION, file=f)
                git = try_git_describe()
                if git:
                    print(git, file=f)
            members.append("zulip-backup/zulip-version")

            with open(os.path.join(tmp, "zulip-backup", "os-version"),
                      "w") as f:
                print(
                    "{DISTRIB_ID} {DISTRIB_CODENAME}".format(
                        **parse_lsb_release()),
                    file=f,
                )
            members.append("zulip-backup/os-version")

            with open(os.path.join(tmp, "zulip-backup", "postgres-version"),
                      "w") as f:
                print(connection.pg_version, file=f)
            members.append("zulip-backup/postgres-version")

            if settings.DEVELOPMENT:
                members.append(
                    os.path.join(settings.DEPLOY_ROOT, "zproject",
                                 "dev-secrets.conf"))
                paths.append(
                    ("zproject", os.path.join(settings.DEPLOY_ROOT,
                                              "zproject")))
            else:
                members.append("/etc/zulip")
                paths.append(("settings", "/etc/zulip"))

            if not options['skip_db']:
                db_name = settings.DATABASES["default"]["NAME"]
                db_dir = os.path.join(tmp, "zulip-backup", "database")
                run(
                    [
                        "pg_dump", "--format=directory", "--file", db_dir,
                        "--", db_name
                    ],
                    cwd=tmp,
                )
                members.append("zulip-backup/database")

            if (not options['skip_uploads']
                    and settings.LOCAL_UPLOADS_DIR is not None
                    and os.path.exists(os.path.join(settings.DEPLOY_ROOT,
                                                    settings.LOCAL_UPLOADS_DIR))):
                members.append(
                    os.path.join(settings.DEPLOY_ROOT,
                                 settings.LOCAL_UPLOADS_DIR))
                paths.append((
                    "uploads",
                    os.path.join(settings.DEPLOY_ROOT,
                                 settings.LOCAL_UPLOADS_DIR),
                ))

            assert not any("|" in name or "|" in path for name, path in paths)
            transform_args = [
                r"--transform=s|^{}(/.*)?$|zulip-backup/{}\1|x".format(
                    re.escape(path), name.replace("\\", r"\\"))
                for name, path in paths
            ]

            try:
                if options["output"] is None:
                    tarball_path = tempfile.NamedTemporaryFile(
                        prefix="zulip-backup-%s-" % (timestamp, ),
                        suffix=".tar.gz",
                        delete=False,
                    ).name
                else:
                    tarball_path = options["output"]

                run(["tar", "-C", tmp, "-cPzf", tarball_path] +
                    transform_args + ["--"] + members)
                print("Backup tarball written to %s" % (tarball_path, ))
            except BaseException:
                if options["output"] is None:
                    os.unlink(tarball_path)
                raise
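The `--transform` expressions built above are GNU tar sed-style rewrite rules using `|` as the delimiter, which is exactly why the `assert` insists `|` never appears in a name or path. A small worked sketch of one rule; the name/path pair here is made up:

import re

name, path = "settings", "/etc/zulip"
transform = r"--transform=s|^{}(/.*)?$|zulip-backup/{}\1|x".format(
    re.escape(path), name.replace("\\", r"\\"))
print(transform)
# With this rule, tar rewrites /etc/zulip/settings.py to
# zulip-backup/settings/settings.py inside the archive, while the
# (/.*)? alternative also matches the bare /etc/zulip directory itself.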
Beispiel #33
0
def install_venv_deps(requirements_file):
    # type: (str) -> None
    pip_requirements = os.path.join(ZULIP_PATH, "requirements", "pip.txt")
    run(["pip", "install", "-U", "--requirement", pip_requirements])
    run(["pip", "install", "--no-deps", "--requirement", requirements_file])
Beispiel #34
0
def install_yum_deps(deps_to_install, retry=False):
    # type: (List[str], bool) -> None
    print(WARNING + "RedHat support is still experimental.")
    run(["sudo", "./scripts/lib/setup-yum-repo"])

    # Hack specific to unregistered RHEL system.  The moreutils
    # package requires a perl module package, which isn't available in
    # the unregistered RHEL repositories.
    #
    # Error: Package: moreutils-0.49-2.el7.x86_64 (epel)
    #        Requires: perl(IPC::Run)
    yum_extra_flags = []  # type: List[str]
    if vendor == 'RedHat':
        exitcode, subs_status = subprocess.getstatusoutput(
            "sudo subscription-manager status")
        if exitcode == 1:
            # TODO: this might be overkill, since `subscription-manager`
            # is already called in setup-yum-repo
            if 'Status' in subs_status:
                # The output is well-formed
                yum_extra_flags = ["--skip-broken"]
            else:
                print(
                    "Unrecognized output. `subscription-manager` might not be available"
                )

    run(["sudo", "yum", "install", "-y"] + yum_extra_flags + deps_to_install)
    if vendor in ["CentOS", "RedHat"]:
        # This is how pip3 gets installed to /usr/bin on CentOS/RHEL
        # for Python 3.5 and later.
        run(["sudo", "python36", "-m", "ensurepip"])
        # `python36` is not aliased to `python3` by default
        run(["sudo", "ln", "-nsf", "/usr/bin/python36", "/usr/bin/python3"])
    postgres_dir = 'pgsql-%s' % (POSTGRES_VERSION, )
    for cmd in ['pg_config', 'pg_isready', 'psql']:
        # Our tooling expects these postgres scripts to be at
        # well-known paths.  There's an argument for eventually
        # making our tooling auto-detect, but this is simpler.
        run([
            "sudo", "ln", "-nsf",
            "/usr/%s/bin/%s" % (postgres_dir, cmd),
            "/usr/bin/%s" % (cmd, )
        ])
    # Compile tsearch-extras from scratch, since we maintain the
    # package and haven't built an RPM package for it.
    run(["sudo", "./scripts/lib/build-tsearch-extras"])
    if vendor == "Fedora":
        # Compile PGroonga from scratch, since pgroonga upstream
        # doesn't provide Fedora packages.
        run(["sudo", "./scripts/lib/build-pgroonga"])

    # From here, we do the first-time setup/initialization for the postgres database.
    pg_datadir = "/var/lib/pgsql/%s/data" % (POSTGRES_VERSION, )
    pg_hba_conf = os.path.join(pg_datadir, "pg_hba.conf")

    # We can't just check if the file exists with os.path, since the
    # current user likely doesn't have permission to read the
    # pg_datadir directory.
    if subprocess.call(["sudo", "test", "-e", pg_hba_conf]) == 0:
        # Skip setup if it has been applied previously
        return

    run([
        "sudo", "-H",
        "/usr/%s/bin/postgresql-%s-setup" % (postgres_dir, POSTGRES_VERSION),
        "initdb"
    ])
    # Use vendored pg_hba.conf, which enables password authentication.
    run([
        "sudo", "cp", "-a", "puppet/zulip/files/postgresql/centos_pg_hba.conf",
        pg_hba_conf
    ])
Beispiel #35
0
    def handle(self, *args: Any, **options: Any) -> None:
        timestamp = timezone_now().strftime(TIMESTAMP_FORMAT)

        with tempfile.TemporaryDirectory(
            prefix="zulip-backup-%s-" % (timestamp,)
        ) as tmp:
            os.mkdir(os.path.join(tmp, "zulip-backup"))
            members = []

            with open(os.path.join(tmp, "zulip-backup", "zulip-version"), "w") as f:
                print(ZULIP_VERSION, file=f)
                git = try_git_describe()
                if git:
                    print(git, file=f)
            members.append("zulip-backup/zulip-version")

            with open(os.path.join(tmp, "zulip-backup", "os-version"), "w") as f:
                print(
                    "{DISTRIB_ID} {DISTRIB_CODENAME}".format(**parse_lsb_release()),
                    file=f,
                )
            members.append("zulip-backup/os-version")

            with open(os.path.join(tmp, "zulip-backup", "postgres-version"), "w") as f:
                print(connection.pg_version, file=f)
            members.append("zulip-backup/postgres-version")

            if settings.DEVELOPMENT:
                os.symlink(
                    os.path.join(settings.DEPLOY_ROOT, "zproject"),
                    os.path.join(tmp, "zulip-backup", "zproject"),
                )
                members.append("zulip-backup/zproject/dev-secrets.conf")
            else:
                os.symlink("/etc/zulip", os.path.join(tmp, "zulip-backup", "settings"))
                members.append("zulip-backup/settings")

            db_name = settings.DATABASES["default"]["NAME"]
            db_dir = os.path.join(tmp, "zulip-backup", "database")
            run(
                ["pg_dump", "--format=directory", "--file", db_dir, "--", db_name],
                cwd=tmp,
            )
            members.append("zulip-backup/database")

            if settings.LOCAL_UPLOADS_DIR is not None and os.path.exists(
                os.path.join(settings.DEPLOY_ROOT, settings.LOCAL_UPLOADS_DIR)
            ):
                os.symlink(
                    os.path.join(settings.DEPLOY_ROOT, settings.LOCAL_UPLOADS_DIR),
                    os.path.join(tmp, "zulip-backup", "uploads"),
                )
                members.append("zulip-backup/uploads")

            try:
                if options["output"] is None:
                    tarball_path = tempfile.NamedTemporaryFile(
                        prefix="zulip-backup-%s-" % (timestamp,),
                        suffix=".tar.gz",
                        delete=False,
                    ).name
                else:
                    tarball_path = options["output"]

                run(["tar", "-C", tmp, "-chzf", tarball_path, "--"] + members)
                print("Backup tarball written to %s" % (tarball_path,))
            except BaseException:
                if options["output"] is None:
                    os.unlink(tarball_path)
                raise
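Compared with Beispiel #32, this variant never copies settings or uploads into the staging directory: it drops symlinks there and relies on the `h` in `-chzf` (GNU tar's `--dereference`) to archive the link targets' contents. The mechanism in isolation, with invented paths:

import os
import subprocess
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    os.symlink("/etc/hostname", os.path.join(tmp, "hostname"))
    # Without -h, the archive would contain a symlink; with it, tar
    # follows the link and stores the target file's bytes.
    subprocess.check_call(
        ["tar", "-C", tmp, "-chzf", os.path.join(tmp, "backup.tar.gz"), "hostname"]
    )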
Beispiel #36
0
def do_setup_virtualenv(venv_path, requirements_file, virtualenv_args):
    # type: (str, str, List[str]) -> None

    # Setup Python virtualenv
    new_packages = set(get_package_names(requirements_file))

    run(["sudo", "rm", "-rf", venv_path])
    if not try_to_copy_venv(venv_path, new_packages):
        # Create new virtualenv.
        run(["sudo", "mkdir", "-p", venv_path])
        run(["sudo", "virtualenv"] + virtualenv_args + [venv_path])
        run(["sudo", "chown", "-R",
             "{}:{}".format(os.getuid(), os.getgid()), venv_path])
        create_log_entry(get_logfile_name(venv_path), "", set(), new_packages)

    create_requirements_index_file(venv_path, requirements_file)
    # Switch current Python context to the virtualenv.
    activate_this = os.path.join(venv_path, "bin", "activate_this.py")
    exec(open(activate_this).read(), {}, dict(__file__=activate_this))

    # use custom certificate if needed
    if os.environ.get('CUSTOM_CA_CERTIFICATES'):
        print("Configuring pip to use custom CA certificates...")
        add_cert_to_pipconf()

    # CentOS-specific hack/workaround
    # Install pycurl with custom flag due to this error when installing
    # via pip:
    # __main__.ConfigurationError: Curl is configured to use SSL, but
    # we have not been able to determine which SSL backend it is using.
    # Please see PycURL documentation for how to specify the SSL
    # backend manually.
    # See https://github.com/pycurl/pycurl/issues/526
    # The fix exists on pycurl master, but not yet in any release
    # We can likely remove this when pycurl > 7.43.0.2 comes out.
    if os.path.exists("/etc/redhat-release"):
        pycurl_env = os.environ.copy()
        pycurl_env["PYCURL_SSL_LIBRARY"] = "nss"
        run(["pip", "install", "pycurl==7.43.0.2", "--compile", "--no-cache-dir"],
            env=pycurl_env)

    try:
        install_venv_deps(requirements_file)
    except subprocess.CalledProcessError:
        # Might be a failure due to network connection issues. Retrying...
        print(WARNING + "`pip install` failed; retrying..." + ENDC)
        install_venv_deps(requirements_file)
    run(["sudo", "chmod", "-R", "a+rX", venv_path])
Beispiel #37
0
# Check the RAM on the user's system, and throw an error if <1.5GB.
# This avoids users getting segfaults running `pip install` that are
# generally more annoying to debug.
with open("/proc/meminfo") as meminfo:
    ram_size = meminfo.readlines()[0].strip().split(" ")[-2]
ram_gb = float(ram_size) / 1024.0 / 1024.0
if ram_gb < 1.5:
    print("You have insufficient RAM (%s GB) to run the Zulip development environment." % (
        round(ram_gb, 2),))
    print("We recommend at least 2 GB of RAM, and require at least 1.5 GB.")
    sys.exit(1)

try:
    UUID_VAR_PATH = get_dev_uuid_var_path(create_if_missing=True)
    run(["mkdir", "-p", UUID_VAR_PATH])
    if os.path.exists(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')):
        os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
    os.symlink(
        os.path.join(ZULIP_PATH, 'README.md'),
        os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')
    )
    os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
except OSError as err:
    print(FAIL + "Error: Unable to create symlinks."
          "Make sure you have permission to create symbolic links." + ENDC)
    print("See this page for more information:")
    print("  https://zulip.readthedocs.io/en/latest/development/setup-vagrant.html#os-symlink-error")
    sys.exit(1)

if platform.architecture()[0] == '64bit':
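The RAM check above trusts MemTotal to be the first line of /proc/meminfo, which holds on current Linux kernels; a slightly more defensive variant of the same check searches for the field by name:

def get_total_ram_gb():
    # type: () -> float
    with open("/proc/meminfo") as meminfo:
        for line in meminfo:
            if line.startswith("MemTotal:"):
                # The line looks like: "MemTotal:       16384256 kB"
                return float(line.split()[1]) / 1024.0 / 1024.0
    raise RuntimeError("MemTotal not found in /proc/meminfo")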
Beispiel #38
0
def main():
    # type: () -> int

    # npm install and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    run(["sudo", "./scripts/lib/setup-apt-repo"])
    # Add groonga repository to get the pgroonga packages
    run(["sudo", "add-apt-repository", "-y", "ppa:groonga/ppa"])
    run(["sudo", "apt-get", "update"])
    run(["sudo", "apt-get", "-y", "install", "--no-install-recommends"] + APT_DEPENDENCIES[codename])

    if TRAVIS:
        if PY2:
            MYPY_REQS_FILE = os.path.join(ZULIP_PATH, "requirements", "mypy.txt")
            setup_virtualenv(PY3_VENV_PATH, MYPY_REQS_FILE, patch_activate_script=True,
                             virtualenv_args=['-p', 'python3'])
            DEV_REQS_FILE = os.path.join(ZULIP_PATH, "requirements", "py2_dev.txt")
            setup_virtualenv(PY2_VENV_PATH, DEV_REQS_FILE, patch_activate_script=True)
        else:
            TWISTED_REQS_FILE = os.path.join(ZULIP_PATH, "requirements", "twisted.txt")
            setup_virtualenv("/srv/zulip-py2-twisted-venv", TWISTED_REQS_FILE,
                             patch_activate_script=True)
            DEV_REQS_FILE = os.path.join(ZULIP_PATH, "requirements", "py3_dev.txt")
            setup_virtualenv(VENV_PATH, DEV_REQS_FILE, patch_activate_script=True,
                             virtualenv_args=['-p', 'python3'])
    else:
        # Import tools/setup_venv.py instead of running it so that we get an
        # activated virtualenv for the rest of the provisioning process.
        from tools.setup import setup_venvs
        setup_venvs.main()

    # Put Python2 virtualenv activation in our .bash_profile.
    with open(os.path.expanduser('~/.bash_profile'), 'w+') as bash_profile:
        bash_profile.writelines([
            "source .bashrc\n",
            "source %s\n" % (os.path.join(VENV_PATH, "bin", "activate"),),
        ])

    run(["sudo", "cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # create log directory `zulip/var/log`
    run(["mkdir", "-p", LOG_DIR_PATH])
    # create upload directory `var/uploads`
    run(["mkdir", "-p", UPLOAD_DIR_PATH])
    # create test upload directory `var/test_upload`
    run(["mkdir", "-p", TEST_UPLOAD_DIR_PATH])
    # create coverage directory `var/coverage`
    run(["mkdir", "-p", COVERAGE_DIR_PATH])
    # create linecoverage directory `var/linecoverage-report`
    run(["mkdir", "-p", LINECOVERAGE_DIR_PATH])
    # create node coverage directory `var/node-coverage`
    run(["mkdir", "-p", NODE_TEST_COVERAGE_DIR_PATH])

    if TRAVIS:
        run(["tools/setup/install-phantomjs", "--travis"])
    else:
        run(["tools/setup/install-phantomjs"])
    run(["tools/setup/download-zxcvbn"])
    run(["tools/setup/emoji_dump/build_emoji"])
    run(["scripts/setup/generate_secrets.py", "-d"])
    if TRAVIS and not PRODUCTION_TRAVIS:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    elif "--docker" in sys.argv:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run(["sudo", "pg_createcluster", "-e", "utf8", "--start", POSTGRES_VERSION, "main"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    if not PRODUCTION_TRAVIS:
        # These won't be used anyway
        run(["scripts/setup/configure-rabbitmq"])
        run(["tools/setup/postgres-init-dev-db"])
        run(["tools/do-destroy-rebuild-database"])
        run(["tools/setup/postgres-init-test-db"])
        run(["tools/do-destroy-rebuild-test-database"])
        run(["python", "./manage.py", "compilemessages"])
    # Install the pinned version of npm.
    install_npm()
    # Run npm install last because it can be flaky, and that way one
    # only needs to rerun `npm install` to fix the installation.
    try:
        setup_node_modules()
    except subprocess.CalledProcessError:
        print(WARNING + "`npm install` failed; retrying..." + ENDC)
        setup_node_modules()

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
Beispiel #39
0
def do_setup_virtualenv(venv_path, requirements_file, virtualenv_args):
    # type: (str, str, List[str]) -> None

    # Setup Python virtualenv
    new_packages = set(get_package_names(requirements_file))

    run_as_root(["rm", "-rf", venv_path])
    if not try_to_copy_venv(venv_path, new_packages):
        # Create new virtualenv.
        run_as_root(["mkdir", "-p", venv_path])
        run_as_root(["virtualenv"] + virtualenv_args + [venv_path])
        run_as_root([
            "chown", "-R", "{}:{}".format(os.getuid(), os.getgid()), venv_path
        ])
        create_log_entry(get_logfile_name(venv_path), "", set(), new_packages)

    create_requirements_index_file(venv_path, requirements_file)

    pip = os.path.join(venv_path, "bin", "pip")

    # use custom certificate if needed
    if os.environ.get('CUSTOM_CA_CERTIFICATES'):
        print("Configuring pip to use custom CA certificates...")
        add_cert_to_pipconf()

    # CentOS-specific hack/workaround
    # Install pycurl with custom flag due to this error when installing
    # via pip:
    # __main__.ConfigurationError: Curl is configured to use SSL, but
    # we have not been able to determine which SSL backend it is using.
    # Please see PycURL documentation for how to specify the SSL
    # backend manually.
    # See https://github.com/pycurl/pycurl/issues/526
    # The fix exists on pycurl master, but not yet in any release
    # We can likely remove this when pycurl > 7.43.0.2 comes out.
    if os.path.exists("/etc/redhat-release"):
        pycurl_env = os.environ.copy()
        pycurl_env["PYCURL_SSL_LIBRARY"] = "nss"
        run([
            pip, "install", "pycurl==7.43.0.2", "--compile", "--no-cache-dir"
        ],
            env=pycurl_env)

    try:
        install_venv_deps(pip, requirements_file)
    except subprocess.CalledProcessError:
        # Might be a failure due to network connection issues. Retrying...
        print(WARNING + "`pip install` failed; retrying..." + ENDC)
        install_venv_deps(pip, requirements_file)

    # The typing module has been part of the standard library since
    # Python 3.5.  Installing the PyPI backport alongside it was
    # harmless until Python 3.7, where it triggers "AttributeError:
    # type object 'Callable' has no attribute '_abc_registry'".  So,
    # to be safe, we uninstall it on all Python >= 3.5 versions.
    # Remove this once 3.4 is no longer supported.
    at_least_35 = sys.version_info.major == 3 and sys.version_info.minor >= 5
    if at_least_35 and 'python2.7' not in virtualenv_args:
        run([pip, "uninstall", "-y", "typing"])

    run_as_root(["chmod", "-R", "a+rX", venv_path])
Beispiel #40
0
def install_apt_deps():
    # type: () -> None
    # setup-apt-repo does an `apt-get update`
    run(["sudo", "./scripts/lib/setup-apt-repo"])
    run(["sudo", "apt-get", "-y", "install", "--no-install-recommends"] + APT_DEPENDENCIES[codename])
Beispiel #41
0
def main(options):
    # type: (Any) -> int

    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # hash the apt dependencies
    sha_sum = hashlib.sha1()

    for apt_dependency in SYSTEM_DEPENDENCIES:
        sha_sum.update(apt_dependency.encode('utf8'))
    if vendor in ["Ubuntu", "Debian"]:
        sha_sum.update(open('scripts/lib/setup-apt-repo', 'rb').read())
    else:
        # hash the content of setup-yum-repo and build-*
        sha_sum.update(open('scripts/lib/setup-yum-repo', 'rb').read())
        build_paths = glob.glob("scripts/lib/build-*")
        for bp in build_paths:
            sha_sum.update(open(bp, 'rb').read())

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
    with open(apt_hash_file_path, 'a+') as hash_file:
        hash_file.seek(0)
        last_apt_dependencies_hash = hash_file.read()

    if (new_apt_dependencies_hash != last_apt_dependencies_hash):
        try:
            install_system_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            print(WARNING +
                  "Installing system dependencies failed; retrying..." + ENDC)
            install_system_deps()
        with open(apt_hash_file_path, 'w') as hash_file:
            hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
    proxy_env = [
        "env",
        "http_proxy=" + os.environ.get("http_proxy", ""),
        "https_proxy=" + os.environ.get("https_proxy", ""),
        "no_proxy=" + os.environ.get("no_proxy", ""),
    ]
    run_as_root(proxy_env + ["scripts/lib/install-node"], sudo_args=['-H'])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        # Hack: We remove `node_modules` as root to work around an
        # issue with the symlinks being improperly owned by root.
        if os.path.islink("node_modules"):
            run_as_root(["rm", "-f", "node_modules"])
        run_as_root(["mkdir", "-p", NODE_MODULES_CACHE_PATH])
        run_as_root(
            ["chown",
             "%s:%s" % (user_id, user_id), NODE_MODULES_CACHE_PATH])
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        try:
            setup_node_modules()
        except subprocess.CalledProcessError:
            print(
                FAIL +
                "`yarn install` is failing; check your network connection (and proxy settings)."
                + ENDC)
            sys.exit(1)

    # Install shellcheck.
    run_as_root(["scripts/lib/install-shellcheck"])

    from tools.setup import setup_venvs
    setup_venvs.main()

    activate_this = "/srv/zulip-py3-venv/bin/activate_this.py"
    exec(open(activate_this).read(), dict(__file__=activate_this))

    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    # This needs to happen before anything that imports zproject.settings.
    run(["scripts/setup/generate_secrets.py", "--development"])

    run_as_root(["cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # create log directory `zulip/var/log`
    os.makedirs(LOG_DIR_PATH, exist_ok=True)
    # create upload directory `var/uploads`
    os.makedirs(UPLOAD_DIR_PATH, exist_ok=True)
    # create test upload directory `var/test_upload`
    os.makedirs(TEST_UPLOAD_DIR_PATH, exist_ok=True)
    # create coverage directory `var/coverage`
    os.makedirs(COVERAGE_DIR_PATH, exist_ok=True)
    # create node coverage directory `var/node-coverage`
    os.makedirs(NODE_TEST_COVERAGE_DIR_PATH, exist_ok=True)
    # create XUnit XML test results directory `var/xunit-test-results`
    os.makedirs(XUNIT_XML_TEST_RESULTS_DIR_PATH, exist_ok=True)

    # The `build_emoji` script requires the `emoji-datasource` package,
    # which we install via npm; thus this step comes after installing
    # npm packages.
    if not os.path.isdir(EMOJI_CACHE_PATH):
        run_as_root(["mkdir", EMOJI_CACHE_PATH])
    run_as_root(["chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    generate_zulip_bots_static_files()

    webfont_paths = [
        "tools/setup/generate-custom-icon-webfont",
        "static/icons/fonts/template.hbs"
    ]
    webfont_paths += glob.glob('static/assets/icons/*')
    if file_or_package_hash_updated(webfont_paths, "webfont_files_hash",
                                    options.is_force):
        run(["tools/setup/generate-custom-icon-webfont"])
    else:
        print("No need to run `tools/setup/generate-custom-icon-webfont`.")

    build_pygments_data_paths = [
        "tools/setup/build_pygments_data", "tools/setup/lang.json"
    ]
    from pygments import __version__ as pygments_version
    if file_or_package_hash_updated(build_pygments_data_paths,
                                    "build_pygments_data_hash",
                                    options.is_force, [pygments_version]):
        run(["tools/setup/build_pygments_data"])
    else:
        print("No need to run `tools/setup/build_pygments_data`.")

    update_authors_json_paths = [
        "tools/update-authors-json", "zerver/tests/fixtures/authors.json"
    ]
    if file_or_package_hash_updated(update_authors_json_paths,
                                    "update_authors_json_hash",
                                    options.is_force):
        run(["tools/update-authors-json", "--use-fixture"])
    else:
        print("No need to run `tools/update-authors-json`.")

    email_source_paths = [
        "tools/inline-email-css", "templates/zerver/emails/email.css"
    ]
    email_source_paths += glob.glob('templates/zerver/emails/*.source.html')
    if file_or_package_hash_updated(email_source_paths,
                                    "last_email_source_files_hash",
                                    options.is_force):
        run(["tools/inline-email-css"])
    else:
        print("No need to run `tools/inline-email-css`.")

    if is_circleci or (is_travis and not options.is_production_travis):
        run_as_root(["service", "rabbitmq-server", "restart"])
        run_as_root(["service", "redis-server", "restart"])
        run_as_root(["service", "memcached", "restart"])
        run_as_root(["service", "postgresql", "restart"])
    elif family == 'redhat':
        for service in [
                "postgresql-%s" % (POSTGRES_VERSION, ), "rabbitmq-server",
                "memcached", "redis"
        ]:
            run_as_root(["systemctl", "enable", service], sudo_args=['-H'])
            run_as_root(["systemctl", "start", service], sudo_args=['-H'])
    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using template_database_status
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import template_database_status, run_db_migrations, \
            destroy_leaked_test_databases

        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        migration_status_path = os.path.join(UUID_VAR_PATH,
                                             "migration_status_dev")
        dev_template_db_status = template_database_status(
            migration_status=migration_status_path,
            settings="zproject.settings",
            database_name="zulip",
        )
        if options.is_force or dev_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        elif dev_template_db_status == 'run_migrations':
            run_db_migrations('dev')
        elif dev_template_db_status == 'current':
            print("No need to regenerate the dev DB.")

        test_template_db_status = template_database_status()
        if options.is_force or test_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        elif test_template_db_status == 'run_migrations':
            run_db_migrations('test')
        elif test_template_db_status == 'current':
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('locale/*/translations.json')

        if file_or_package_hash_updated(paths, "last_compilemessages_hash",
                                        options.is_force):
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

        destroyed = destroy_leaked_test_databases()
        if destroyed:
            print("Dropped %s stale test databases!" % (destroyed, ))

    run(["scripts/lib/clean-unused-caches"])

    # Keeping this cache file around can cause eslint to throw
    # random TypeErrors when new/updated dependencies are added
    if os.path.isfile('.eslintcache'):
        # Remove this block when
        # https://github.com/eslint/eslint/issues/11639 is fixed
        # upstream.
        os.remove('.eslintcache')

    # Clean up the root of the `var/` directory for various
    # testing-related files that we have migrated to
    # `var/<uuid>/test-backend`.
    print("Cleaning var/ directory files...")
    var_paths = glob.glob('var/test*')
    var_paths.append('var/bot_avatar')
    for path in var_paths:
        try:
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
        except FileNotFoundError:
            pass

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file, ))
    with open(version_file, 'w') as f:
        f.write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
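The `open(path, 'a+')` / `seek(0)` / `read()` sequence used for the apt hash file above is a compact create-if-missing read: append mode creates the file when it does not exist (so the first provision run reads an empty string instead of hitting FileNotFoundError), and seeking back to 0 makes the existing contents readable. The idiom by itself, with an invented path:

with open("/tmp/example_last_hash", "a+") as hash_file:
    hash_file.seek(0)             # 'a+' opens positioned at end-of-file
    last_hash = hash_file.read()  # "" on the very first run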
Beispiel #42
0
def main(options: argparse.Namespace) -> int:
    setup_bash_profile()
    setup_shell_profile('~/.zprofile')

    # This needs to happen before anything that imports zproject.settings.
    run(["scripts/setup/generate_secrets.py", "--development"])

    create_var_directories()

    # The `build_emoji` script requires the `emoji-datasource` package,
    # which we install via npm; thus this step comes after installing
    # npm packages.
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    generate_zulip_bots_static_files()

    if options.is_force or need_to_run_build_pygments_data():
        run(["tools/setup/build_pygments_data"])
        write_new_digest(
            'build_pygments_data_hash',
            build_pygments_data_paths(),
            [pygments_version]
        )
    else:
        print("No need to run `tools/setup/build_pygments_data`.")

    if options.is_force or need_to_run_inline_email_css():
        run(["scripts/setup/inline_email_css.py"])
        write_new_digest(
            "last_email_source_files_hash",
            inline_email_css_paths(),
        )
    else:
        print("No need to run `scripts/setup/inline_email_css.py`.")

    if not options.is_build_release_tarball_only:
        # The following block is skipped when we just need the development
        # environment to build a release tarball.

        # Need to set up Django before using template_status
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import (
            DEV_DATABASE,
            TEST_DATABASE,
            destroy_leaked_test_databases,
        )
        from django.conf import settings

        if options.is_force or need_to_run_configure_rabbitmq(
                [settings.RABBITMQ_PASSWORD]):
            run(["scripts/setup/configure-rabbitmq"])
            write_new_digest(
                'last_configure_rabbitmq_hash',
                configure_rabbitmq_paths(),
                [settings.RABBITMQ_PASSWORD]
            )
        else:
            print("No need to run `scripts/setup/configure-rabbitmq.")

        dev_template_db_status = DEV_DATABASE.template_status()
        if options.is_force or dev_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/rebuild-dev-database"])
            DEV_DATABASE.write_new_db_digest()
        elif dev_template_db_status == 'run_migrations':
            DEV_DATABASE.run_db_migrations()
        elif dev_template_db_status == 'current':
            print("No need to regenerate the dev DB.")

        test_template_db_status = TEST_DATABASE.template_status()
        if options.is_force or test_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/rebuild-test-database"])
            TEST_DATABASE.write_new_db_digest()
        elif test_template_db_status == 'run_migrations':
            TEST_DATABASE.run_db_migrations()
        elif test_template_db_status == 'current':
            print("No need to regenerate the test DB.")

        if options.is_force or need_to_run_compilemessages():
            run(["./manage.py", "compilemessages"])
            write_new_digest(
                "last_compilemessages_hash",
                compilemessages_paths(),
            )
        else:
            print("No need to run `manage.py compilemessages`.")

        destroyed = destroy_leaked_test_databases()
        if destroyed:
            print("Dropped %s stale test databases!" % (destroyed,))

    clean_unused_caches()

    # Keeping this cache file around can cause eslint to throw
    # random TypeErrors when new/updated dependencies are added
    if os.path.isfile('.eslintcache'):
        # Remove this block when
        # https://github.com/eslint/eslint/issues/11639 is fixed
        # upstream.
        os.remove('.eslintcache')

    # Clean up the root of the `var/` directory for various
    # testing-related files that we have migrated to
    # `var/<uuid>/test-backend`.
    print("Cleaning var/ directory files...")
    var_paths = glob.glob('var/test*')
    var_paths.append('var/bot_avatar')
    for path in var_paths:
        try:
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
        except FileNotFoundError:
            pass

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file,))
    with open(version_file, 'w') as f:
        f.write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
Beispiel #43
0
def main(options):
    # type: (Any) -> int

    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # hash the apt dependencies
    sha_sum = hashlib.sha1()

    for apt_dependency in SYSTEM_DEPENDENCIES[codename]:
        sha_sum.update(apt_dependency.encode('utf8'))
    # hash the content of setup-apt-repo
    sha_sum.update(open('scripts/lib/setup-apt-repo', 'rb').read())

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
    with open(apt_hash_file_path, 'a+') as hash_file:
        hash_file.seek(0)
        last_apt_dependencies_hash = hash_file.read()

    if (new_apt_dependencies_hash != last_apt_dependencies_hash):
        try:
            install_system_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            install_system_deps(retry=True)
        with open(apt_hash_file_path, 'w') as hash_file:
            hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
    proxy_env = [
        "env",
        "http_proxy=" + os.environ.get("http_proxy", ""),
        "https_proxy=" + os.environ.get("https_proxy", ""),
        "no_proxy=" + os.environ.get("no_proxy", ""),
    ]
    run(["sudo", "-H"] + proxy_env + ["scripts/lib/install-node"])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        # Hack: We remove `node_modules` as root to work around an
        # issue with the symlinks being improperly owned by root.
        if os.path.islink("node_modules"):
            run(["sudo", "rm", "-f", "node_modules"])
        run(["sudo", "mkdir", "-p", NODE_MODULES_CACHE_PATH])
        run([
            "sudo", "chown",
            "%s:%s" % (user_id, user_id), NODE_MODULES_CACHE_PATH
        ])
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        setup_node_modules()

    # Install shellcheck.
    run(["sudo", "scripts/lib/install-shellcheck"])

    # Import tools/setup_venv.py instead of running it so that we get an
    # activated virtualenv for the rest of the provisioning process.
    from tools.setup import setup_venvs
    setup_venvs.main()

    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    run(["sudo", "cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # create log directory `zulip/var/log`
    os.makedirs(LOG_DIR_PATH, exist_ok=True)
    # create upload directory `var/uploads`
    os.makedirs(UPLOAD_DIR_PATH, exist_ok=True)
    # create test upload directory `var/test_upload`
    os.makedirs(TEST_UPLOAD_DIR_PATH, exist_ok=True)
    # create coverage directory `var/coverage`
    os.makedirs(COVERAGE_DIR_PATH, exist_ok=True)
    # create node coverage directory `var/node-coverage`
    os.makedirs(NODE_TEST_COVERAGE_DIR_PATH, exist_ok=True)

    # The `build_emoji` script requires the `emoji-datasource` package,
    # which we install via npm; hence this step must run after the npm
    # packages are installed.
    if not os.path.isdir(EMOJI_CACHE_PATH):
        run(["sudo", "mkdir", EMOJI_CACHE_PATH])
    run(["sudo", "chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    generate_zulip_bots_static_files()

    webfont_paths = [
        "tools/setup/generate-custom-icon-webfont",
        "static/icons/fonts/template.hbs"
    ]
    webfont_paths += glob.glob('static/assets/icons/*')
    if file_or_package_hash_updated(webfont_paths, "webfont_files_hash",
                                    options.is_force):
        run(["tools/setup/generate-custom-icon-webfont"])
    else:
        print("No need to run `tools/setup/generate-custom-icon-webfont`.")

    build_pygments_data_paths = [
        "tools/setup/build_pygments_data", "tools/setup/lang.json"
    ]
    from pygments import __version__ as pygments_version
    if file_or_package_hash_updated(build_pygments_data_paths,
                                    "build_pygments_data_hash",
                                    options.is_force, [pygments_version]):
        run(["tools/setup/build_pygments_data"])
    else:
        print("No need to run `tools/setup/build_pygments_data`.")

    run(["scripts/setup/generate_secrets.py", "--development"])

    update_authors_json_paths = [
        "tools/update-authors-json", "zerver/tests/fixtures/authors.json"
    ]
    if file_or_package_hash_updated(update_authors_json_paths,
                                    "update_authors_json_hash",
                                    options.is_force):
        run(["tools/update-authors-json", "--use-fixture"])
    else:
        print("No need to run `tools/update-authors-json`.")

    email_source_paths = [
        "tools/inline-email-css", "templates/zerver/emails/email.css"
    ]
    email_source_paths += glob.glob('templates/zerver/emails/*.source.html')
    if file_or_package_hash_updated(email_source_paths,
                                    "last_email_source_files_hash",
                                    options.is_force):
        run(["tools/inline-email-css"])
    else:
        print("No need to run `tools/inline-email-css`.")

    if is_circleci or (is_travis and not options.is_production_travis):
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
        run(["sudo", "service", "postgresql", "restart"])
    elif vendor == 'CentOS':
        for service in [
                "postgresql-%s" % (POSTGRES_VERSION, ), "rabbitmq server",
                "memcached", "redis"
        ]:
            run(["sudo", "-H", "systemctl", "enable", service])
            run(["sudo", "-H", "systemctl", "start", service])
    elif options.is_docker:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run([
            "sudo", "pg_createcluster", "-e", "utf8", "--start",
            POSTGRES_VERSION, "main"
        ])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using template_database_status
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import template_database_status, run_db_migrations

        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        migration_status_path = os.path.join(UUID_VAR_PATH,
                                             "migration_status_dev")
        dev_template_db_status = template_database_status(
            migration_status=migration_status_path,
            settings="zproject.settings",
            database_name="zulip",
        )
        if options.is_force or dev_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        elif dev_template_db_status == 'run_migrations':
            run_db_migrations('dev')
        elif dev_template_db_status == 'current':
            print("No need to regenerate the dev DB.")

        test_template_db_status = template_database_status()
        if options.is_force or test_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        elif test_template_db_status == 'run_migrations':
            run_db_migrations('test')
        elif test_template_db_status == 'current':
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('static/locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('static/locale/*/translations.json')

        if file_or_package_hash_updated(paths, "last_compilemessages_hash",
                                        options.is_force):
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

    run(["scripts/lib/clean-unused-caches"])

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file, ))
    with open(version_file, 'w') as f:
        f.write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
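Instead of a digest, the RabbitMQ check in this revision probes the service directly: it tries to construct a queue client and treats any exception as "not configured". A generic sketch of that probe pattern, with `probe` standing in for `SimpleQueueClient`:

from typing import Callable

def service_is_configured(probe):
    # type: (Callable[[], object]) -> bool
    """Return True if constructing a client succeeds."""
    try:
        probe()
        return True
    except Exception:
        # Connection refused, bad credentials, missing vhost, etc.
        return False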
Beispiel #44
0
def do_setup_virtualenv(venv_path, requirements_file, virtualenv_args):
    # type: (str, str, List[str]) -> None

    # Setup Python virtualenv
    new_packages = set(get_package_names(requirements_file))

    run(["sudo", "rm", "-rf", venv_path])
    if not try_to_copy_venv(venv_path, new_packages):
        # Create new virtualenv.
        run(["sudo", "mkdir", "-p", venv_path])
        run(["sudo", "virtualenv"] + virtualenv_args + [venv_path])
        run([
            "sudo", "chown", "-R", "{}:{}".format(os.getuid(), os.getgid()),
            venv_path
        ])
        create_log_entry(get_logfile_name(venv_path), "", set(), new_packages)

    create_requirements_index_file(venv_path, requirements_file)
    # Switch current Python context to the virtualenv.
    activate_this = os.path.join(venv_path, "bin", "activate_this.py")
    exec(open(activate_this).read(), {},
         dict(__file__=activate_this))  # type: ignore # https://github.com/python/mypy/issues/1577

    run(["pip", "install", "-U", "setuptools"])
    run(["pip", "install", "--upgrade", "pip", "wheel"])
    run(["pip", "install", "--no-deps", "--requirement", requirements_file])
    run(["sudo", "chmod", "-R", "a+rX", venv_path])
Beispiel #45
0
def try_to_copy_venv(venv_path, new_packages):
    # type: (str, Set[str]) -> bool
    """
    Tries to copy packages from an old virtual environment in the cache
    to the new virtual environment. The algorithm works as follows:
        1. Find a virtual environment, v, from the cache that has the
        highest overlap with the new requirements such that:
            a. The new requirements only add to the packages of v.
            b. The new requirements only upgrade packages of v.
        2. Copy the contents of v to the new virtual environment using
        virtualenv-clone.
        3. Delete all .pyc files in the new virtual environment.
    """
    if not os.path.exists(VENV_CACHE_PATH):
        return False

    venv_name = os.path.basename(venv_path)

    overlaps = []  # type: List[Tuple[int, str, Set[str]]]
    old_packages = set()  # type: Set[str]
    for sha1sum in os.listdir(VENV_CACHE_PATH):
        curr_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, venv_name)
        if (curr_venv_path == venv_path
                or not os.path.exists(get_index_filename(curr_venv_path))):
            continue

        old_packages = get_venv_packages(curr_venv_path)
        # We only consider using old virtualenvs that only
        # contain packages that we want in our new virtualenv.
        if not (old_packages - new_packages):
            overlap = new_packages & old_packages
            overlaps.append((len(overlap), curr_venv_path, overlap))

    target_log = get_logfile_name(venv_path)
    source_venv_path = None
    if overlaps:
        # Here, we select the old virtualenv with the largest overlap
        overlaps = sorted(overlaps)
        _, source_venv_path, copied_packages = overlaps[-1]
        print('Copying packages from {}'.format(source_venv_path))
        clone_ve = "{}/bin/virtualenv-clone".format(source_venv_path)
        cmd = "sudo {exe} {source} {target}".format(exe=clone_ve,
                                                    source=source_venv_path,
                                                    target=venv_path).split()
        try:
            run(cmd)
        except Exception:
            # Virtualenv-clone is not installed. Install it and try running
            # the command again.
            try:
                run("{}/bin/pip install --no-deps virtualenv-clone".format(
                    source_venv_path).split())
                run(cmd)
            except Exception:
                # virtualenv-clone isn't working, so just make a new venv
                return False

        run([
            "sudo", "chown", "-R", "{}:{}".format(os.getuid(), os.getgid()),
            venv_path
        ])
        source_log = get_logfile_name(source_venv_path)
        copy_parent_log(source_log, target_log)
        create_log_entry(target_log, source_venv_path, copied_packages,
                         new_packages - copied_packages)
        return True

    return False
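The candidate filter above boils down to two set operations: a cached venv is eligible only if `old_packages - new_packages` is empty (it contains nothing unwanted), and among eligible ones the largest intersection `new_packages & old_packages` wins. A toy run of that selection logic, with made-up package sets:

new = {"django==2.2", "requests==2.22", "ujson==1.35"}
cached = [
    {"django==2.2", "requests==2.22"},  # subset of new: eligible, overlap 2
    {"django==2.2", "flask==1.1"},      # contains flask: not eligible
]
eligible = [(len(new & old), old) for old in cached if not (old - new)]
best = max(eligible, key=lambda t: t[0])[1] if eligible else None
print(best)  # {'django==2.2', 'requests==2.22'} (set order may vary)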
Beispiel #46
0
    def handle(self, *args: Any, **options: Any) -> None:
        timestamp = timezone_now().strftime(TIMESTAMP_FORMAT)

        with tempfile.TemporaryDirectory(
            prefix="zulip-backup-%s-" % (timestamp,)
        ) as tmp:
            os.mkdir(os.path.join(tmp, "zulip-backup"))
            members = []

            with open(os.path.join(tmp, "zulip-backup", "zulip-version"), "w") as f:
                print(ZULIP_VERSION, file=f)
                git = try_git_describe()
                if git:
                    print(git, file=f)
            members.append("zulip-backup/zulip-version")

            with open(os.path.join(tmp, "zulip-backup", "os-version"), "w") as f:
                print(
                    "{DISTRIB_ID} {DISTRIB_CODENAME}".format(**parse_lsb_release()),
                    file=f,
                )
            members.append("zulip-backup/os-version")

            with open(os.path.join(tmp, "zulip-backup", "postgres-version"), "w") as f:
                print(connection.pg_version, file=f)
            members.append("zulip-backup/postgres-version")

            if settings.DEVELOPMENT:
                os.symlink(
                    os.path.join(settings.DEPLOY_ROOT, "zproject"),
                    os.path.join(tmp, "zulip-backup", "zproject"),
                )
                members.append("zulip-backup/zproject/dev-secrets.conf")
            else:
                os.symlink("/etc/zulip", os.path.join(tmp, "zulip-backup", "settings"))
                members.append("zulip-backup/settings")

            db_name = settings.DATABASES["default"]["NAME"]
            db_dir = os.path.join(tmp, "zulip-backup", "database")
            run(["pg_dump", "--format=directory", db_name, "--file", db_dir])
            members.append("zulip-backup/database")

            if settings.LOCAL_UPLOADS_DIR is not None and os.path.exists(
                os.path.join(settings.DEPLOY_ROOT, settings.LOCAL_UPLOADS_DIR)
            ):
                os.symlink(
                    os.path.join(settings.DEPLOY_ROOT, settings.LOCAL_UPLOADS_DIR),
                    os.path.join(tmp, "zulip-backup", "uploads"),
                )
                members.append("zulip-backup/uploads")

            try:
                if options["output"] is None:
                    tarball_path = tempfile.NamedTemporaryFile(
                        prefix="zulip-backup-%s-" % (timestamp,),
                        suffix=".tar.gz",
                        delete=False,
                    ).name
                else:
                    tarball_path = options["output"]

                run(["tar", "-C", tmp, "-chzf", tarball_path, "--"] + members)
                print("Backup tarball written to %s" % (tarball_path,))
            except BaseException:
                if options["output"] is None:
                    os.unlink(tarball_path)
                raise
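
The `tar -chzf` invocation above depends on the `-h` flag: the command stages
directories like `settings` and `uploads` as symlinks inside the temporary
directory, and `-h` tells tar to dereference them so the archive contains the
real file contents rather than dangling links. A minimal sketch of the same
pattern, using a hypothetical source file rather than Zulip's real layout:

import os
import subprocess
import tempfile

with tempfile.TemporaryDirectory(prefix="backup-demo-") as tmp:
    os.mkdir(os.path.join(tmp, "backup"))
    # Stage a file by symlink instead of copying it.
    os.symlink("/etc/hostname", os.path.join(tmp, "backup", "hostname"))
    # tar's -h flag dereferences symlinks, so the archive stores the real
    # contents; without it, extracting elsewhere would yield a broken link.
    subprocess.check_call(
        ["tar", "-C", tmp, "-chzf", "/tmp/backup-demo.tar.gz", "--", "backup"]
    )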
Beispiel #47
0
def install_venv_deps(pip: str, requirements_file: str, python2: bool) -> None:
    pip_requirements = os.path.join(ZULIP_PATH, "requirements", "pip2.txt" if python2 else "pip.txt")
    run([pip, "install", "--force-reinstall", "--require-hashes", "--requirement", pip_requirements])
    run([pip, "install", "--no-deps", "--require-hashes", "--requirement", requirements_file])
Beispiel #48
0
def install_venv_deps(requirements_file):
    # type: (str) -> None
    pip_requirements = os.path.join(ZULIP_PATH, "requirements", "pip.txt")
    run(["pip", "install", "-U", "--requirement", pip_requirements])
    run(["pip", "install", "--no-deps", "--requirement", requirements_file])
Beispiel #49
0
def try_to_copy_venv(venv_path, new_packages):
    # type: (str, Set[str]) -> bool
    """
    Tries to copy packages from an old virtual environment in the cache
    to the new virtual environment. The algorithm works as follows:
        1. Find a virtual environment, v, from the cache that has the
        highest overlap with the new requirements such that:
            a. The new requirements only add to the packages of v.
            b. The new requirements only upgrade packages of v.
        2. Copy the contents of v to the new virtual environment using
        virtualenv-clone.
        3. Delete all .pyc files in the new virtual environment.
    """
    if not os.path.exists(VENV_CACHE_PATH):
        return False

    venv_name = os.path.basename(venv_path)

    overlaps = []  # type: List[Tuple[int, str, Set[str]]]
    old_packages = set()  # type: Set[str]
    for sha1sum in os.listdir(VENV_CACHE_PATH):
        curr_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, venv_name)
        if (curr_venv_path == venv_path
                or not os.path.exists(get_index_filename(curr_venv_path))):
            continue

        old_packages = get_venv_packages(curr_venv_path)
        # We only consider using old virtualenvs that contain only
        # packages that we want in our new virtualenv.
        if not (old_packages - new_packages):
            overlap = new_packages & old_packages
            overlaps.append((len(overlap), curr_venv_path, overlap))

    target_log = get_logfile_name(venv_path)
    source_venv_path = None
    if overlaps:
        # Here, we select the old virtualenv with the largest overlap
        overlaps = sorted(overlaps)
        _, source_venv_path, copied_packages = overlaps[-1]
        print('Copying packages from {}'.format(source_venv_path))
        clone_ve = "{}/bin/virtualenv-clone".format(source_venv_path)
        cmd = "sudo {exe} {source} {target}".format(exe=clone_ve,
                                                    source=source_venv_path,
                                                    target=venv_path).split()
        try:
            # TODO: We can probably remove this in a few months, now
            # that we can expect that virtualenv-clone is present in
            # all of our recent virtualenvs.
            run(cmd)
        except Exception:
            # Virtualenv-clone is not installed. Install it and try running
            # the command again.
            try:
                run("{}/bin/pip install --no-deps virtualenv-clone".format(
                    source_venv_path).split())
                run(cmd)
            except Exception:
                # virtualenv-clone isn't working, so just make a new venv
                return False

        # virtualenv-clone, unfortunately, copies the success stamp,
        # which means if the upcoming `pip install` phase were to
        # fail, we'd end up with a broken half-provisioned virtualenv
        # that's incorrectly tagged as properly provisioned.  The
        # right fix is to use
        # https://github.com/edwardgeorge/virtualenv-clone/pull/38,
        # but this rm is almost as good.
        success_stamp_path = os.path.join(venv_path, 'success-stamp')
        run_as_root(["rm", "-f", success_stamp_path])

        run_as_root([
            "chown", "-R", "{}:{}".format(os.getuid(), os.getgid()), venv_path
        ])
        source_log = get_logfile_name(source_venv_path)
        copy_parent_log(source_log, target_log)
        create_log_entry(target_log, source_venv_path, copied_packages,
                         new_packages - copied_packages)
        return True

    return False
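
The selection step in the docstring is easiest to see on toy data: a cached
virtualenv is a candidate only if `old_packages - new_packages` is empty (it
adds nothing we don't want), and among the candidates, the one with the
largest intersection wins. The package names below are made up for
illustration:

new_packages = {"django", "requests", "six", "ujson"}
cached_venvs = {
    "venv-a": {"django", "requests"},           # subset -> candidate
    "venv-b": {"django", "requests", "flask"},  # has flask -> rejected
    "venv-c": {"six"},                          # subset -> candidate
}

overlaps = []
for path, old_packages in cached_venvs.items():
    if not (old_packages - new_packages):  # old adds nothing unwanted
        overlap = new_packages & old_packages
        overlaps.append((len(overlap), path, overlap))

# sorted() orders by overlap size, so the last entry is the best source:
# venv-a, with an overlap of 2 packages.
print(sorted(overlaps)[-1])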
Beispiel #50
0
def add_cert_to_pipconf():
    # type: () -> None
    conffile = os.path.expanduser("~/.pip/pip.conf")
    confdir = os.path.expanduser("~/.pip/")
    os.makedirs(confdir, exist_ok=True)
    run(["crudini", "--set", conffile, "global", "cert", os.environ["CUSTOM_CA_CERTIFICATES"]])
Beispiel #51
0
def main(options: argparse.Namespace) -> int:
    setup_bash_profile()
    setup_shell_profile('~/.zprofile')

    # This needs to happen before anything that imports zproject.settings.
    run(["scripts/setup/generate_secrets.py", "--development"])

    create_var_directories()

    # The `build_emoji` script requires the `emoji-datasource` package,
    # which we install via npm; thus this step comes after installing
    # npm packages.
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    generate_zulip_bots_static_files()

    if options.is_force or need_to_run_build_pygments_data():
        run(["tools/setup/build_pygments_data"])
    else:
        print("No need to run `tools/setup/build_pygments_data`.")

    if options.is_force or need_to_run_inline_email_css():
        run(["scripts/setup/inline_email_css.py"])
    else:
        print("No need to run `scripts/setup/inline_email_css.py`.")

    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using template_database_status
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import template_database_status, run_db_migrations, \
            destroy_leaked_test_databases

        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("No need to run `scripts/setup/configure-rabbitmq.")

        dev_template_db_status = template_database_status('dev')
        if options.is_force or dev_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        elif dev_template_db_status == 'run_migrations':
            run_db_migrations('dev')
        elif dev_template_db_status == 'current':
            print("No need to regenerate the dev DB.")

        test_template_db_status = template_database_status('test')
        if options.is_force or test_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        elif test_template_db_status == 'run_migrations':
            run_db_migrations('test')
        elif test_template_db_status == 'current':
            print("No need to regenerate the test DB.")

        if options.is_force or need_to_run_compilemessages():
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

        destroyed = destroy_leaked_test_databases()
        if destroyed:
            print("Dropped %s stale test databases!" % (destroyed, ))

    run(["scripts/lib/clean-unused-caches", "--threshold=6"])

    # Keeping this cache file around can cause eslint to throw
    # random TypeErrors when new/updated dependencies are added
    if os.path.isfile('.eslintcache'):
        # Remove this block when
        # https://github.com/eslint/eslint/issues/11639 is fixed
        # upstream.
        os.remove('.eslintcache')

    # Clean up the root of the `var/` directory for various
    # testing-related files that we have migrated to
    # `var/<uuid>/test-backend`.
    print("Cleaning var/ directory files...")
    var_paths = glob.glob('var/test*')
    var_paths.append('var/bot_avatar')
    for path in var_paths:
        try:
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
        except FileNotFoundError:
            pass

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file, ))
    open(version_file, 'w').write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
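
The database handling above is a small state machine: template_database_status
returns one of 'needs_rebuild', 'run_migrations', or 'current', and provision
dispatches on that (with --force escalating to a full rebuild). A condensed
sketch of that dispatch, with rebuild_db and run_db_migrations standing in for
the real helpers:

from typing import Callable

def provision_database(status: str, is_force: bool,
                       rebuild_db: Callable[[], None],
                       run_db_migrations: Callable[[], None]) -> None:
    if is_force or status == 'needs_rebuild':
        rebuild_db()            # drop and recreate from scratch
    elif status == 'run_migrations':
        run_db_migrations()     # template exists; just catch it up
    elif status == 'current':
        print("No need to regenerate the DB.")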
Beispiel #52
0
def main(options):
    # type: (Any) -> int

    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # hash the apt dependencies
    sha_sum = hashlib.sha1()

    for apt_dependency in SYSTEM_DEPENDENCIES:
        sha_sum.update(apt_dependency.encode('utf8'))
    if vendor in ["Ubuntu", "Debian"]:
        sha_sum.update(open('scripts/lib/setup-apt-repo', 'rb').read())
    else:
        # hash the content of setup-yum-repo and build-*
        sha_sum.update(open('scripts/lib/setup-yum-repo', 'rb').read())
        build_paths = glob.glob("scripts/lib/build-")
        for bp in build_paths:
            sha_sum.update(open(bp, 'rb').read())

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
    with open(apt_hash_file_path, 'a+') as hash_file:
        hash_file.seek(0)
        last_apt_dependencies_hash = hash_file.read()

    if new_apt_dependencies_hash != last_apt_dependencies_hash:
        try:
            install_system_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            install_system_deps(retry=True)
        with open(apt_hash_file_path, 'w') as hash_file:
            hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
    proxy_env = [
        "env",
        "http_proxy=" + os.environ.get("http_proxy", ""),
        "https_proxy=" + os.environ.get("https_proxy", ""),
        "no_proxy=" + os.environ.get("no_proxy", ""),
    ]
    run_as_root(proxy_env + ["scripts/lib/install-node"], sudo_args=['-H'])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        # Hack: We remove `node_modules` as root to work around an
        # issue with the symlinks being improperly owned by root.
        if os.path.islink("node_modules"):
            run_as_root(["rm", "-f", "node_modules"])
        run_as_root(["mkdir", "-p", NODE_MODULES_CACHE_PATH])
        run_as_root(["chown", "%s:%s" % (user_id, user_id), NODE_MODULES_CACHE_PATH])
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        setup_node_modules()

    # Install shellcheck.
    run_as_root(["scripts/lib/install-shellcheck"])

    from tools.setup import setup_venvs
    setup_venvs.main()

    activate_this = "/srv/zulip-py3-venv/bin/activate_this.py"
    exec(open(activate_this).read(), {}, dict(__file__=activate_this))

    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    # This needs to happen before anything that imports zproject.settings.
    run(["scripts/setup/generate_secrets.py", "--development"])

    run_as_root(["cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # create log directory `zulip/var/log`
    os.makedirs(LOG_DIR_PATH, exist_ok=True)
    # create upload directory `var/uploads`
    os.makedirs(UPLOAD_DIR_PATH, exist_ok=True)
    # create test upload directory `var/test_upload`
    os.makedirs(TEST_UPLOAD_DIR_PATH, exist_ok=True)
    # create coverage directory `var/coverage`
    os.makedirs(COVERAGE_DIR_PATH, exist_ok=True)
    # create linecoverage directory `var/node-coverage`
    os.makedirs(NODE_TEST_COVERAGE_DIR_PATH, exist_ok=True)

    # The `build_emoji` script requires the `emoji-datasource` package,
    # which we install via npm; thus this step comes after installing
    # npm packages.
    if not os.path.isdir(EMOJI_CACHE_PATH):
        run_as_root(["mkdir", EMOJI_CACHE_PATH])
    run_as_root(["chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    generate_zulip_bots_static_files()

    webfont_paths = ["tools/setup/generate-custom-icon-webfont", "static/icons/fonts/template.hbs"]
    webfont_paths += glob.glob('static/assets/icons/*')
    if file_or_package_hash_updated(webfont_paths, "webfont_files_hash", options.is_force):
        run(["tools/setup/generate-custom-icon-webfont"])
    else:
        print("No need to run `tools/setup/generate-custom-icon-webfont`.")

    build_pygments_data_paths = ["tools/setup/build_pygments_data", "tools/setup/lang.json"]
    from pygments import __version__ as pygments_version
    if file_or_package_hash_updated(build_pygments_data_paths, "build_pygments_data_hash", options.is_force,
                                    [pygments_version]):
        run(["tools/setup/build_pygments_data"])
    else:
        print("No need to run `tools/setup/build_pygments_data`.")

    update_authors_json_paths = ["tools/update-authors-json", "zerver/tests/fixtures/authors.json"]
    if file_or_package_hash_updated(update_authors_json_paths, "update_authors_json_hash", options.is_force):
        run(["tools/update-authors-json", "--use-fixture"])
    else:
        print("No need to run `tools/update-authors-json`.")

    email_source_paths = ["tools/inline-email-css", "templates/zerver/emails/email.css"]
    email_source_paths += glob.glob('templates/zerver/emails/*.source.html')
    if file_or_package_hash_updated(email_source_paths, "last_email_source_files_hash", options.is_force):
        run(["tools/inline-email-css"])
    else:
        print("No need to run `tools/inline-email-css`.")

    if is_circleci or (is_travis and not options.is_production_travis):
        run_as_root(["service", "rabbitmq-server", "restart"])
        run_as_root(["service", "redis-server", "restart"])
        run_as_root(["service", "memcached", "restart"])
        run_as_root(["service", "postgresql", "restart"])
    elif family == 'redhat':
        for service in ["postgresql-%s" % (POSTGRES_VERSION,), "rabbitmq-server", "memcached", "redis"]:
            run_as_root(["systemctl", "enable", service], sudo_args = ['-H'])
            run_as_root(["systemctl", "start", service], sudo_args = ['-H'])
    elif options.is_docker:
        run_as_root(["service", "rabbitmq-server", "restart"])
        run_as_root(["pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run_as_root(["pg_createcluster", "-e", "utf8", "--start", POSTGRES_VERSION, "main"])
        run_as_root(["service", "redis-server", "restart"])
        run_as_root(["service", "memcached", "restart"])
    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using template_database_status
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import template_database_status, run_db_migrations

        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        migration_status_path = os.path.join(UUID_VAR_PATH, "migration_status_dev")
        dev_template_db_status = template_database_status(
            migration_status=migration_status_path,
            settings="zproject.settings",
            database_name="zulip",
        )
        if options.is_force or dev_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        elif dev_template_db_status == 'run_migrations':
            run_db_migrations('dev')
        elif dev_template_db_status == 'current':
            print("No need to regenerate the dev DB.")

        test_template_db_status = template_database_status()
        if options.is_force or test_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        elif test_template_db_status == 'run_migrations':
            run_db_migrations('test')
        elif test_template_db_status == 'current':
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('static/locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('static/locale/*/translations.json')

        if file_or_package_hash_updated(paths, "last_compilemessages_hash", options.is_force):
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

    run(["scripts/lib/clean-unused-caches"])

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file,))
    open(version_file, 'w').write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
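
The apt block above illustrates a caching idiom this script uses throughout:
hash every input that could affect an expensive step, compare against a stamp
file, and rerun the step only when the hash changes. A minimal sketch of the
idiom, with hypothetical inputs and stamp path:

import hashlib
import os
from typing import Callable, List

def run_if_inputs_changed(inputs, stamp_path, action):
    # type: (List[str], str, Callable[[], None]) -> None
    sha_sum = hashlib.sha1()
    for path in inputs:
        with open(path, 'rb') as f:
            sha_sum.update(f.read())
    new_hash = sha_sum.hexdigest()

    old_hash = None
    if os.path.exists(stamp_path):
        with open(stamp_path) as f:
            old_hash = f.read()

    if new_hash != old_hash:
        action()  # only write the stamp once the step has succeeded
        with open(stamp_path, 'w') as f:
            f.write(new_hash)
    else:
        print("Inputs unchanged; skipping.")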
Beispiel #53
0
def do_setup_virtualenv(venv_path, requirements_file, virtualenv_args):
    # type: (str, str, List[str]) -> None

    # Setup Python virtualenv
    run(["sudo", "rm", "-rf", venv_path])
    run(["sudo", "mkdir", "-p", venv_path])
    run(["sudo", "chown", "{}:{}".format(os.getuid(), os.getgid()), venv_path])
    run(["virtualenv"] + virtualenv_args + [venv_path])

    # Switch current Python context to the virtualenv.
    activate_this = os.path.join(venv_path, "bin", "activate_this.py")
    exec(open(activate_this).read(), {},
         dict(__file__=activate_this))  # type: ignore # https://github.com/python/mypy/issues/1577

    run(["pip", "install", "-U", "setuptools"]),
    run(["pip", "install", "--upgrade", "pip", "wheel"])
    run(["pip", "install", "--no-deps", "--requirement", requirements_file])
    run(["sudo", "chmod", "-R", "a+rX", venv_path])