Exemplo n.º 1
0
def clone(online_url,
          path,
          branch='master',
          do=False,
          check=True,
          expand_path=True):
    """
    Open (or clone) a git repository and return the corresponding Repo.

    If the local folder already exists it is opened as-is; otherwise the
    repository is cloned from online_url, but only when do=True (an error
    is raised otherwise, suggesting to init the project). When do=True the
    requested branch is also checked out, and when check=True the local
    copy is compared against the remote.
    """

    # Resolve the on-disk location: relative to the submodules folder,
    # or taken verbatim when expand_path is disabled
    repo_path = (
        os.path.join(os.curdir, SUBMODULES_DIR, path) if expand_path else path
    )

    if os.path.exists(repo_path):
        log.debug("Path {} already exists", repo_path)
        gitobj = Repo(repo_path)
    elif do:
        gitobj = Repo.clone_from(url=online_url, to_path=repo_path)
        log.info("Cloned repo {}@{} as {}", online_url, branch, path)
    else:
        log.exit("Repo {} missing as {}. You should init your project".format(
            online_url, repo_path))

    if do:
        switch_branch(gitobj, branch)

    if check:
        compare_repository(gitobj, branch, online_url=online_url, path=path)

    return gitobj
Exemplo n.º 2
0
 def exec_command(self,
                  service,
                  user=None,
                  command=None,
                  disable_tty=False,
                  nofailure=False):
     """
     Execute a command on a running container.

     The command is split into executable and arguments and forwarded to
     compose's exec_command. When the target container does not exist,
     either raise AttributeError (nofailure=True) or terminate with an
     error (nofailure=False).
     """
     shell_command, shell_args = self.split_command(command)

     # Options forwarded to compose; flags we don't expose are pinned
     # to their defaults explicitly
     options = {
         'SERVICE': service,
         'COMMAND': shell_command,
         'ARGS': shell_args,
         '--index': '1',
         '--user': user,
         '-T': disable_tty,
         '--env': None,
         '--workdir': None,
         # '-d': False,
         '--detach': False,
         '--privileged': False,
     }

     if shell_command is not None:
         log.debug("Command: {}({}+{})", service.lower(), shell_command,
                   shell_args)

     try:
         return self.command('exec_command', options, nofailure=nofailure)
     except NoSuchService:
         if nofailure:
             raise AttributeError("Cannot find service: {}".format(service))
         log.exit("Cannot find a running container called {}", service)
Exemplo n.º 3
0
def get_current_gid() -> int:
    """
    Return the group id of the current process.

    Falls back to 0 on platforms where os.getgid is unavailable (Windows).
    """
    try:
        gid = os.getgid()
    # Can fail on Windows
    except AttributeError as err:  # pragma: no cover
        log.debug(err)
        return 0
    return gid
Exemplo n.º 4
0
def clone(
    url: str, path: Path, branch: str, do: bool = False, check: bool = True
) -> Repo:
    """
    Open (or clone, when do=True) the submodule repository and return it.

    With do=True the requested branch is checked out after cloning; with
    check=True the local copy is compared against the remote repository.
    """

    local_path = SUBMODULES_DIR.joinpath(path)

    if local_path.exists():
        log.debug("Path {} already exists", local_path)
        repo = Repo(local_path)
    elif do:
        repo = Repo.clone_from(url=url, to_path=local_path)
        log.info("Cloned {}@{} as {}", url, branch, path)
    else:
        print_and_exit(
            "Repo {} missing as {}. You should init your project",
            url,
            local_path,
        )

    if do and not switch_branch(repo, branch):  # pragma: no cover
        print_and_exit("Cannot switch repo {} to version {}", local_path, branch)

    if check:
        compare_repository(repo, branch, online_url=url)

    return repo
Exemplo n.º 5
0
    def check_installed_software() -> None:
        """Verify the presence and versions of the software rapydo depends on."""

        vinfo = sys.version_info
        log.debug("python version: {}.{}.{}", vinfo.major, vinfo.minor, vinfo.micro)

        # 17.05 added support for multi-stage builds
        # https://docs.docker.com/compose/compose-file/compose-file-v3/#compose-and-docker-compatibility-matrix
        # 18.09.2 fixed the CVE-2019-5736 vulnerability
        # 20.10.0 introduced copy --chmod and improved logging
        Packages.check_program(
            "docker", min_version="20.10.0", min_recommended_version="20.10.0"
        )

        if docker.compose.is_installed():
            # Verifying the version on every command is too slow (~0.5-2s),
            # so only the presence is logged
            # v = docker.compose.version()
            # log.debug("docker compose is installed: {}", v)
            log.debug("docker compose is installed")
        else:  # pragma: no cover
            print_and_exit(
                "A mandatory dependency is missing: docker compose not found"
                "\nInstallation guide: "
                "https://docs.docker.com/compose/cli-command/#installing-compose-v2"
                "\nor try the automated installation with {command}",
                command=RED("rapydo install compose"),
            )
Exemplo n.º 6
0
def get_username(uid: int) -> str:
    """
    Map a numeric uid to its account name.

    On platforms without the pwd module (Windows), fall back to returning
    the uid itself as a string.
    """
    try:
        import pwd
    # Can fail on Windows
    except ImportError as err:  # pragma: no cover
        log.debug(err)
        return str(uid)

    return pwd.getpwuid(uid).pw_name
Exemplo n.º 7
0
    def git_checks(ignore_submodule: List[str]) -> None:
        """
        Run update and unstaged-changes checks on every tracked git repo,
        skipping the submodules listed in ignore_submodule.
        """
        for name, gitobj in Application.gits.items():
            if name in ignore_submodule:
                log.debug("Skipping checks on {}", name)
            elif gitobj:
                git.check_updates(name, gitobj)
                git.check_unstaged(name, gitobj)
Exemplo n.º 8
0
    def check_program(
        program: str,
        min_version: Optional[str] = None,
        max_version: Optional[str] = None,
        min_recommended_version: Optional[str] = None,
    ) -> str:
        """
        Verify if a binary exists and (optionally) its version

        program: name of the executable to look up on the system
        min_version: exit with an error if the found version is older
        max_version: exit with an error if the found version is newer
        min_recommended_version: only warn if the found version is older
        Returns the version string found on the system.
        """

        found_version = Packages.get_bin_version(program)
        if found_version is None:

            hints = ""
            if program == "docker":  # pragma: no cover
                install_cmd = RED("rapydo install docker")
                hints = "\n\nTo install docker visit: https://get.docker.com"
                # Bug fix: a newline was missing here, gluing the URL and the
                # suggestion together as "...get.docker.comor execute ..."
                hints += f"\nor execute {install_cmd}"

            print_and_exit("A mandatory dependency is missing: {} not found{}",
                           program, hints)

        v = Version(found_version)
        if min_version is not None:
            if Version(min_version) > v:
                print_and_exit(
                    "Minimum supported version for {} is {}, found {}",
                    program,
                    min_version,
                    found_version,
                )

        if min_recommended_version is not None:
            if Version(min_recommended_version) > v:
                # Recommended (not mandatory) minimum: warn without exiting
                log.warning(
                    "Minimum recommended version for {} is {}, found {}",
                    program,
                    min_recommended_version,
                    found_version,
                )

        if max_version is not None:
            if Version(max_version) < v:
                print_and_exit(
                    "Maximum supported version for {} is {}, found {}",
                    program,
                    max_version,
                    found_version,
                )

        log.debug("{} version: {}", program, found_version)
        return found_version
Exemplo n.º 9
0
    def command(self, command, options=None, nofailure=False):
        """
        Invoke a docker-compose command by name on the compose handler.

        command: name of the compose method to call (e.g. 'exec_command')
        options: dict forwarded to the compose method; a missing/None
            'SERVICE' entry is defaulted to an empty list
        nofailure: when True, user/operation/build failures raise
            AttributeError instead of terminating the process

        Returns the compose method's output, or None when the call did not
        complete normally.
        """

        # NOTE: debug defaults
        # tmp = self.get_defaults(command)
        # print("TEST", tmp, type(tmp))
        # # exit(1)

        compose_handler = self.get_handle()
        # Resolve the compose method dynamically by name
        method = getattr(compose_handler, command)

        if options is None:
            options = {}

        if options.get('SERVICE', None) is None:
            options['SERVICE'] = []

        log.debug("{}'{}'", compose_log, command)

        out = None
        # sometimes this import stucks... importing here to avoid unnecessary waits
        from docker.errors import APIError
        try:
            out = method(options=options)
        except SystemExit as e:
            # NOTE: we check the status here.
            # System exit is received also when a normal command finished.
            if e.code < 0:
                log.warning("Invalid code returned: {}", e.code)
            elif e.code > 0:
                # Real failure: propagate compose's exit code
                log.exit("Compose received: system.exit({})",
                         e.code,
                         error_code=e.code)
            else:
                # exit code 0: the command simply completed
                log.verbose("Executed compose {} w/{}", command, options)
        except (clierrors.UserError, cerrors.OperationFailedError,
                BuildError) as e:
            msg = "Failed command execution:\n{}".format(e)
            if nofailure:
                raise AttributeError(msg)
            else:
                log.exit(msg)
        except APIError as e:
            # Errors raised by the docker engine API
            log.exit("Failed docker container:\n{}", e)
        except (ProjectError, NoSuchService) as e:
            log.exit(str(e))
        else:
            log.verbose("Executed compose {} w/{}", command, options)

        return out
Exemplo n.º 10
0
    def git_update(ignore_submodule: List[str]) -> None:
        """
        Update all tracked git repositories (except the ignored submodules)
        and, when the controller itself ("do") was updated, verify that the
        installed rapydo package points at the updated folder.
        """

        # First pass: abort before touching anything if any repo can't be
        # fast-forwarded
        for name, gitobj in Application.gits.items():
            if name in ignore_submodule:
                log.debug("Skipping update on {}", name)
            elif gitobj and not git.can_be_updated(name, gitobj):
                print_and_exit("Can't continue with updates")

        # Second pass: perform the updates
        controller_is_updated = False
        for name, gitobj in Application.gits.items():
            if name in ignore_submodule:
                continue

            controller_is_updated = controller_is_updated or name == "do"

            if gitobj:
                git.update(name, gitobj)

        if not controller_is_updated:
            return

        installation_path = Packages.get_installation_path("rapydo")

        # Can't be tested on GA since rapydo is always installed from a folder
        if not installation_path:  # pragma: no cover
            log.warning("Controller is not installed in editable mode, "
                        "rapydo is unable to update it")
            return

        working_dir = Application.gits["do"].working_dir
        if not working_dir:  # pragma: no cover
            log.warning("Controller submodule folder can't be found")
            return

        do_dir = Path(working_dir)
        if do_dir.is_symlink():
            # This can be used starting from py39
            # do_dir = do_dir.readlink()
            do_dir = do_dir.resolve()

        if do_dir == installation_path:
            log.info("Controller installed from {} and updated",
                     installation_path)
        else:
            log.warning(
                "Controller not updated because it is installed outside this "
                "project. Installation path is {}, the current folder is {}",
                installation_path,
                do_dir,
            )
Exemplo n.º 11
0
    def __init__(self) -> None:
        """Prepare the Jinja2 environment backed by the package's template folder."""

        package_root = Path(__file__).resolve().parent
        self.template_dir = package_root.joinpath(TEMPLATE_DIR)

        if not self.template_dir.is_dir():
            print_and_exit("Template folder not found: {}", self.template_dir)

        log.debug("Template folder: {}", self.template_dir)

        self.env = Environment(
            loader=FileSystemLoader([TEMPLATE_DIR, self.template_dir]),
            undefined=DebugUndefined,
            autoescape=True,
            keep_trailing_newline=True,
        )
        # Custom filters available to all templates
        self.env.filters["password"] = password
        self.env.filters["username"] = username
Exemplo n.º 12
0
    def print_command(*parameters: Optional[str]) -> None:
        """Log (also to file) the full rapydo command line being executed."""

        pre_params = " ".join(
            p for p in Configuration.parameters if p is not None
        ).strip()
        post_params = " ".join(p for p in parameters if p is not None).strip()

        log.debug(
            "Command: rapydo {}{}{}",
            f"{pre_params} " if pre_params else "",
            Configuration.action,
            f" {post_params}" if post_params else "",
            log_to_file=True,
        )
Exemplo n.º 13
0
def find_templates_override(services: ComposeServices,
                            templates: BuildInfo) -> Dict[str, str]:
    """
    Map each service image that is built on top of a rapydo/ template
    (and is not itself a template) to the template image it extends.
    """

    overrides: Dict[str, str] = {}

    for service in services.values():

        # Only consider services with a local build that are not templates
        if service.build is None or service.image in templates:
            continue

        baseimage = get_dockerfile_base_image(service.build.context,
                                              templates)

        if baseimage.startswith("rapydo/"):
            log.debug("{} extends {}", service.image, baseimage)
            overrides[service.image] = baseimage

    return overrides
Exemplo n.º 14
0
def find_active(services: ComposeServices) -> List[str]:
    """
    Check only services involved in current mode,
    which is equal to services 'activated' + 'depends_on'.
    """

    # Dependency map of every service, and the explicitly activated ones
    dependencies: Dict[str, List[str]] = {
        name: list(service.depends_on.keys())
        for name, service in services.items()
    }
    base_actives: List[str] = [
        name
        for name, service in services.items()
        if service.environment and service.environment.get("ACTIVATE", "0") == "1"
    ]

    log.debug("Base active services: {}", ", ".join(base_actives))
    # log.debug("Services dependencies: {}", ", ".join(dependencies))
    return walk_services(base_actives, dependencies)
Exemplo n.º 15
0
    def create_projectrc(self) -> None:
        """
        Render the default projectrc from its template, reload it, and keep
        the file only if the project already provides configuration files.
        """
        templating = Templating()
        content = templating.get_template(
            "projectrc",
            {
                "project": Configuration.project,
                "hostname": Configuration.hostname,
                "swarm": Configuration.swarm_mode,
                "production": Configuration.production,
                "testing": Configuration.testing,
                "services": self.active_services,
                "env_variables": Configuration.environment,
            },
        )
        templating.save_template(PROJECTRC, content, force=True)

        Application.load_projectrc()

        if self.files:
            log.info("Created default {} file", PROJECTRC)
        else:
            # The generated projectrc was only needed to bootstrap
            # load_projectrc: drop it again
            log.debug("Created temporary default {} file", PROJECTRC)
            PROJECTRC.unlink()
Exemplo n.º 16
0
def check_file_younger_than(
    gitobj: Repo, filename: Path, timestamp: Union[str, float]
) -> Tuple[bool, float, datetime]:
    """
    Verify whether filename was last modified (per git blame) after timestamp.

    gitobj: repository that tracks filename
    filename: file to inspect, relative to the repository
    timestamp: reference time; must be accepted by timestamp_from_string and
        convertible with float()

    Returns (is_younger, timestamp-as-float, last_modification); when the
    file has no retrievable history it returns (False, 0.0, the epoch).
    """

    try:
        commits = gitobj.blame(rev="HEAD", file=str(filename))
    except GitCommandError:
        log.debug("Can't retrieve a commit history for {}", filename)
        # 0.0 (not int 0) to honour the declared Tuple[bool, float, datetime]
        return False, 0.0, datetime.fromtimestamp(0)

    # added a default date to prevent errors in case of new files with no blame commits
    dates = [datetime.fromtimestamp(0).replace(tzinfo=pytz.utc)]
    if commits:
        for commit in commits:
            current_blame = gitobj.commit(rev=str(commit[0]))
            dates.append(current_blame.committed_datetime)

    max_date = max(dates)
    # NOTE(review): float(timestamp) raises ValueError for non-numeric
    # strings even if timestamp_from_string accepts them — callers appear to
    # pass epoch-like values; confirm before widening the accepted formats
    return (
        bool(timestamp_from_string(timestamp) < max_date),
        float(timestamp),
        max_date,
    )
Exemplo n.º 17
0
def create_project(
    project_name: str,
    auth: str,
    frontend: str,
    services: List[str],
    extend: Optional[str],
    envs: Optional[List[str]] = None,
    auto: bool = False,
    force: bool = False,
    force_current: bool = False,
    add_optionals: bool = False,
    path: Optional[Path] = None,
) -> None:
    """
    Create (or upgrade a single file of) a project scaffold.

    project_name: name of the project to create
    auth: authentication service ("postgres"/"mysql" map to sqlalchemy,
        "no" disables authentication)
    frontend: frontend framework, or "no" to disable the frontend
    services: extra services to enable (rabbit, redis, celery, ...)
    extend: name of a project to extend, if any
    envs: extra KEY=VALUE environment variables for the templates
    auto: create files automatically instead of printing manual instructions
    force: overwrite files that already exist (only with auto)
    force_current: allow creation in a non-empty folder
    add_optionals: also create optional folders/files
    path: when set, upgrade only this single expected file
    """

    project_scaffold = Project()
    # Service flags: enabled either as auth backend or as explicit service
    enable_postgres = auth == "postgres" or "postgres" in services
    enable_mysql = auth == "mysql" or "mysql" in services
    enable_neo4j = auth == "neo4j" or "neo4j" in services
    enable_rabbit = "rabbit" in services
    enable_redis = "redis" in services
    enable_celery = "celery" in services
    enable_flower = "flower" in services
    enable_fail2ban = "fail2ban" in services
    enable_ftp = "ftp" in services
    enable_bot = "bot" in services

    # Both relational backends are served through sqlalchemy
    if auth == "postgres" or auth == "mysql":
        auth = "sqlalchemy"

    if auth == "no":
        auth = NO_AUTHENTICATION

    if frontend == "no":
        frontend = NO_FRONTEND

    # Refuse to scaffold into a non-empty folder unless explicitly forced
    if not force_current:
        dirs = os.listdir(".")
        if dirs and dirs != [".git"]:
            print_and_exit(
                "Current folder is not empty, cannot create a new project here.\n"
                "Found: {}\n"
                "Use --current to force the creation here",
                ", ".join(dirs[0:3]),  # add first 3 files/folders found
            )

    celery_broker = None  # Keep default value == REDIS
    celery_backend = None  # Keep default value == REDIS
    if enable_celery:

        # Prefer rabbit as broker when available, redis otherwise
        if enable_rabbit:
            celery_broker = "RABBIT"
        else:
            celery_broker = "REDIS"
            enable_redis = True

        # Prefer redis as result backend when available
        if enable_redis:
            celery_backend = "REDIS"
        else:
            celery_backend = "RABBIT"

    env_variables = parse_env_variables(envs)

    project_scaffold.load_project_scaffold(project_name, auth, services)
    if frontend != NO_FRONTEND:
        project_scaffold.load_frontend_scaffold(frontend)

    # In case of errors this function will exit
    project_scaffold.check_invalid_characters(project_name)

    if project_name in project_scaffold.reserved_project_names:
        print_and_exit(
            "You selected a reserved name, invalid project name: {}",
            project_name)

    templating = Templating()

    # Create the expected folder layout (plus optionals when requested)
    folders = project_scaffold.expected_folders + project_scaffold.data_folders

    if add_optionals:
        folders += project_scaffold.optionals_folders

    for f in folders:
        if f.exists():
            log.debug("Project folder already exists: {}", f)
            continue
        # In manual mode, print the command the user should run and stop
        if not auto:
            print_and_exit("\nmkdir -p {}", f)

        f.mkdir(parents=True, exist_ok=True)

    # Touch .gitkeep files so empty folders are tracked by git
    for f in project_scaffold.suggested_gitkeep:
        f.open("a").close()

    files = project_scaffold.expected_files
    if add_optionals:
        files += project_scaffold.optionals_files

    # Single-file upgrade mode: only regenerate the requested file
    if path:
        if path not in files:
            print_and_exit("Invalid path, cannot upgrade {}", path)
        else:
            files = [path]

    for p in files:

        # Render the template for this file with the full project context
        template = templating.get_template(
            p.name,
            {
                "version": __version__,
                "project": project_name,
                "auth_service": auth,
                "enable_postgres": enable_postgres,
                "enable_mysql": enable_mysql,
                "enable_neo4j": enable_neo4j,
                "enable_rabbit": enable_rabbit,
                "enable_redis": enable_redis,
                "enable_celery": enable_celery,
                "enable_flower": enable_flower,
                "enable_fail2ban": enable_fail2ban,
                "enable_ftp": enable_ftp,
                "enable_bot": enable_bot,
                "celery_broker": celery_broker,
                "celery_backend": celery_backend,
                "frontend": frontend,
                "testing": Configuration.testing,
                "extend": extend,
                "services": services,
                "env_variables": env_variables,
            },
        )

        # automatic creation
        if auto:
            if p.exists() and not force:
                log.info("Project file already exists: {}", p)
            else:
                templating.save_template(p, template, force=force)
            continue

        # manual creation: show the rendered content and stop at this file
        if p.exists():
            log.info("Project file already exists: {}", p)
        else:
            print(f"\n{template}")
            print_and_exit(str(p))

    # Raw (non-templated) files are plain copies from the template folder
    if not path:
        for p in project_scaffold.raw_files:
            # automatic creation
            if auto:
                if p.exists() and not force:
                    log.info("Project file already exists: {}", p)
                else:
                    shutil.copyfile(templating.template_dir.joinpath(p.name),
                                    p)
                continue

            # manual creation
            if p.exists():
                log.info("Project file already exists: {}", p)
            else:
                # print(f"Missing file: {p}")
                print_and_exit("File is missing: {}", p)
Exemplo n.º 18
0
def check(
    no_git: bool = typer.Option(
        False,
        "--no-git",
        "-s",
        help="Skip checks on git commits",
        show_default=False,
    ),
    no_builds: bool = typer.Option(
        False,
        "--no-builds",
        help="Skip check on docker builds",
        show_default=False,
    ),
    ignore_submodules: List[str] = typer.Option(
        [],
        "--ignore-submodule",
        "-i",
        help="Ignore submodule",
        show_default=False,
        shell_complete=Application.autocomplete_submodule,
    ),
) -> None:
    # Sanity checks on the project: git status, docker builds (obsolete or
    # missing images), fixed files, compose/buildx versions and expired
    # passwords. No docstring on purpose: typer would use it as help text.

    Application.print_command(
        Application.serialize_parameter("--no-git", no_git, IF=no_git),
        Application.serialize_parameter("--no-builds", no_builds,
                                        IF=no_builds),
        Application.serialize_parameter("--ignore-submodule",
                                        ignore_submodules),
    )
    Application.get_controller().controller_init()

    docker = Docker()
    if Configuration.swarm_mode:
        log.debug("Swarm is correctly initialized")

        docker.swarm.check_resources()

    if no_git:
        log.info("Skipping git checks")
    else:
        log.info("Checking git (skip with --no-git)")
        Application.git_checks(ignore_submodules)

    if no_builds:
        log.info("Skipping builds checks")
    else:
        log.info("Checking builds (skip with --no-builds)")

        # Flat list of every image tag known to the docker daemon
        dimages: List[str] = []

        for img in docker.client.images():
            if img.repo_tags:
                for i in img.repo_tags:
                    dimages.append(i)

        all_builds = find_templates_build(Application.data.compose_config)
        core_builds = find_templates_build(Application.data.base_services)
        overriding_builds = find_templates_override(
            Application.data.compose_config, core_builds)

        for image_tag, build in all_builds.items():

            # Only check builds used by at least one active service
            services = build["services"]
            if not any(x in Application.data.active_services
                       for x in services):
                continue

            if image_tag not in dimages:
                if image_tag in core_builds:
                    log.warning(
                        "Missing {} image, execute {command}",
                        image_tag,
                        command=RED("rapydo pull"),
                    )
                else:
                    log.warning(
                        "Missing {} image, execute {command}",
                        image_tag,
                        command=RED("rapydo build"),
                    )
                continue

            image_creation = get_image_creation(image_tag)
            # Check if some recent commit modified the Dockerfile

            d1, d2 = build_is_obsolete(image_creation, build.get("path"))
            if d1 and d2:
                tmp_from_image = overriding_builds.get(image_tag)
                # This is the case of a build not overriding a core image,
                # e.g nifi or geoserver. In that case from_image is faked to image_tag
                # just to make print_obsolete to print 'build' instead of 'pull'
                if not tmp_from_image and image_tag not in core_builds:
                    tmp_from_image = image_tag

                print_obsolete(image_tag, d1, d2, build.get("service"),
                               tmp_from_image)

            # if FROM image is newer, this build should be re-built
            elif image_tag in overriding_builds:
                from_img = overriding_builds.get(image_tag, "")
                from_build: Optional[TemplateInfo] = core_builds.get(from_img)

                if not from_build:  # pragma: no cover
                    log.critical("Malformed {} image, from build is missing",
                                 image_tag)
                    continue

                # Verify if template build exists
                if from_img not in dimages:  # pragma: no cover
                    # Bug fix: the message had a dangling third {} placeholder
                    # with no matching argument
                    log.warning(
                        "Missing template build for {} ({})",
                        from_build.get("services"),
                        from_img,
                    )

                from_timestamp = get_image_creation(from_img)
                # Verify if template build is obsolete or not

                d1, d2 = build_is_obsolete(from_timestamp,
                                           from_build.get("path"))
                if d1 and d2:  # pragma: no cover
                    print_obsolete(from_img, d1, d2, from_build.get("service"))

                if from_timestamp > image_creation:
                    b = image_creation.strftime(DATE_FORMAT)
                    c = from_timestamp.strftime(DATE_FORMAT)
                    print_obsolete(image_tag, b, c, build.get("service"),
                                   from_img)

    # Warn about fixed files the user modified locally
    templating = Templating()
    for filename in Application.project_scaffold.fixed_files:
        if templating.file_changed(str(filename)):
            # Bug fix: the hint was a placeholder-less f-string; interpolate
            # the changed file so the suggested command is actionable
            log.warning(
                "{} changed, please execute {command}",
                filename,
                command=RED(f"rapydo upgrade --path {filename}"),
            )

    # Extract installed compose/buildx plugin versions from their CLI output
    compose_version = "Unknown"
    buildx_version = "Unknown"
    m = re.search(
        r"^Docker Compose version (v[0-9]+\.[0-9]+\.[0-9]+)$",
        docker.client.compose.version(),
    )
    if m:
        compose_version = m.group(1)

    m = re.search(
        r"^github.com/docker/buildx (v[0-9]+\.[0-9]+\.[0-9]+) .*$",
        docker.client.buildx.version(),
    )
    if m:
        buildx_version = m.group(1)

    if compose_version == COMPOSE_VERSION:
        log.info("Compose is installed with version {}", COMPOSE_VERSION)
    else:  # pragma: no cover
        cmd = RED("rapydo install compose")
        fix_hint = f"You can update it with {cmd}"
        log.warning(
            "Compose is installed with version {}, expected version is {}.\n{}",
            compose_version,
            COMPOSE_VERSION,
            fix_hint,
        )

    if buildx_version == BUILDX_VERSION:
        log.info("Buildx is installed with version {}", BUILDX_VERSION)
    else:  # pragma: no cover
        cmd = RED("rapydo install buildx")
        fix_hint = f"You can update it with {cmd}"
        log.warning(
            "Buildx is installed with version {}, expected version is {}.\n{}",
            buildx_version,
            BUILDX_VERSION,
            fix_hint,
        )

    for expired_passwords in get_expired_passwords():
        log.warning(
            "{} is expired on {}",
            expired_passwords[0],
            expired_passwords[1].strftime("%Y-%m-%d"),
        )

    log.info("Checks completed")
Exemplo n.º 19
0
    def controller_init(self,
                        services: Optional[Iterable[str]] = None) -> None:
        """
        Initialize the controller for the current command.

        Progressively loads project, scaffold, specs, git submodules,
        environment and compose configuration, returning early once enough
        state is available for the current action (create, version,
        install, initialize, update).

        services: optional subset of services for the compose configuration
        """
        # 'create' needs no project at all: only verify the tooling
        if Configuration.create:
            Application.check_installed_software()
            return None

        main_folder_error = Application.project_scaffold.check_main_folder()

        if main_folder_error:
            print_and_exit(main_folder_error)

        if not Configuration.print_version:
            Application.check_installed_software()

        # if project is None, it is retrieved from the project folder
        Configuration.project = Application.project_scaffold.get_project(
            Configuration.project)
        Configuration.ABS_PROJECT_PATH = PROJECT_DIR.joinpath(
            Configuration.project)

        # --version only needs the specs, nothing else
        if Configuration.print_version:
            self.read_specs(read_extended=True)
            return None

        log.debug("You are using RAPyDo version {}", __version__)
        if Configuration.check:
            log.info("Selected project: {}", Configuration.project)
        else:
            log.debug("Selected project: {}", Configuration.project)

        # These actions require remote access (git/docker registries)
        if (Configuration.initialize or Configuration.update
                or Configuration.check or Configuration.install):
            Application.check_internet_connection()

        if Configuration.install:
            self.read_specs(read_extended=False)
            return None

        # Auth is not available yet, will be read by read_specs
        Application.project_scaffold.load_project_scaffold(
            Configuration.project, auth=None)
        Application.preliminary_version_check()

        # read project configuration
        self.read_specs(read_extended=True)

        # from read_specs
        Application.project_scaffold.load_frontend_scaffold(
            Configuration.frontend)
        Application.verify_rapydo_version()
        Application.project_scaffold.inspect_project_folder()

        self.current_uid = system.get_current_uid()
        self.current_gid = system.get_current_gid()

        # Cannot be tested
        if self.current_uid == ROOT_UID:  # pragma: no cover
            self.current_uid = BASE_UID
            log.warning("Current user is 'root'")
        else:
            os_user = system.get_username(self.current_uid)
            log.debug("Current UID: {} ({})", self.current_uid, os_user)
            log.debug("Current GID: {}", self.current_gid)

        if Configuration.initialize:
            return None

        Application.git_submodules()

        if Configuration.update:
            return None

        self.make_env()

        # Compose services and variables
        base_services, compose_config = self.get_compose_configuration(
            services)

        # 'password' manages placeholders itself, skip the check there
        if Configuration.action != "password":
            self.check_placeholders_and_passwords(compose_config,
                                                  self.enabled_services)

        # Expose the collected state to every command via Application.data
        Application.data = CommandsData(
            files=self.files,
            base_files=self.base_files,
            services=self.enabled_services,
            active_services=self.active_services,
            base_services=base_services,
            compose_config=compose_config,
        )

        return None
Exemplo n.º 20
0
def backup(
    service: SupportedServices = typer.Argument(..., help="Service name"),
    force: bool = typer.Option(
        False,
        "--force",
        help="Force the backup procedure",
        show_default=False,
    ),
    max_backups: int = typer.Option(
        0,
        "--max",
        help=
        "Maximum number of backups, older exceeding this number will be removed",
        show_default=False,
    ),
    dry_run: bool = typer.Option(
        False,
        "--dry-run",
        help="Do not perform any backup or delete backup files",
        show_default=False,
    ),
    restart: List[str] = typer.Option(
        [],
        "--restart",
        help=
        "Service to be restarted once completed the backup (multiple allowed)",
        shell_complete=Application.autocomplete_service,
    ),
) -> None:
    # Execute the backup procedure of the selected service, optionally
    # rotating old backup files and restarting dependent services.
    # No docstring on purpose: typer would use it as CLI help text.

    Application.print_command(
        Application.serialize_parameter("--force", force, IF=force),
        Application.serialize_parameter("--max", max_backups, IF=max_backups),
        Application.serialize_parameter("--dry-run", dry_run, IF=dry_run),
        Application.serialize_parameter("--restart", restart, IF=restart),
        Application.serialize_parameter("", service.value),
    )

    if dry_run:
        log.warning("Dry run mode is enabled")

    Application.get_controller().controller_init()

    service_name = service.value

    verify_available_images(
        [service_name],
        Application.data.compose_config,
        Application.data.base_services,
    )

    docker = Docker()
    container = docker.get_container(service_name)

    backup_dir = BACKUP_DIR.joinpath(service_name)
    backup_dir.mkdir(parents=True, exist_ok=True)

    # Rotation: keep at most max_backups files, dropping the oldest ones
    if max_backups > 0:
        existing = sorted(backup_dir.glob(get_date_pattern()))
        if len(existing) <= max_backups:
            log.debug("Found {} backup files, maximum not reached",
                      len(existing))
        else:
            for old in existing[:-max_backups]:
                if not dry_run:
                    old.unlink()
                log.warning(
                    "{} deleted because exceeding the max number of backup files ({})",
                    old.name,
                    max_backups,
                )

    module = BACKUP_MODULES.get(service.value)

    if not module:  # pragma: no cover
        print_and_exit(f"{service.value} misconfiguration, module not found")

    now = datetime.now().strftime("%Y_%m_%d-%H_%M_%S")
    module.backup(container=container, now=now, force=force, dry_run=dry_run)

    # Give connected clients some time before restarting the services
    if restart and not dry_run:
        log.info("Restarting services in 20 seconds...")
        time.sleep(10)
        log.info("Restarting services in 10 seconds...")
        time.sleep(10)
        reload(docker, restart)
Exemplo n.º 21
0
def run(
    service: str = typer.Argument(
        ...,
        help="Service name",
        shell_complete=Application.autocomplete_allservice,
    ),
    pull: bool = typer.Option(
        False,
        "--pull",
        help="Pull the image before starting the container",
        show_default=False,
    ),
    debug: bool = typer.Option(
        False,
        "--debug",
        help="Start the container in debug mode",
        show_default=False,
    ),
    command: str = typer.Option(
        None,
        "--command",
        help="UNIX command to be executed in the container",
        show_default=False,
    ),
    user: str = typer.Option(
        None,
        "--user",
        "-u",
        help="User existing in selected service",
        show_default=False,
    ),
    first_port: Optional[int] = typer.Option(
        None,
        "--port",
        "-p",
        help="port to be associated to the current service interface",
    ),
    detach: Optional[bool] = typer.Option(
        None,
        "--detach",
        help="Start the container in detach mode (default for non-interfaces)",
        show_default=False,
    ),
) -> None:

    Application.print_command(
        Application.serialize_parameter("--pull", pull, IF=pull),
        Application.serialize_parameter("--debug", debug, IF=debug),
        Application.serialize_parameter("--command", command, IF=command),
        Application.serialize_parameter("--user", user, IF=user),
        Application.serialize_parameter("--port", first_port, IF=first_port),
        Application.serialize_parameter("", service),
    )

    Configuration.FORCE_COMPOSE_ENGINE = True

    Application.get_controller().controller_init()

    Application.get_controller().check_placeholders_and_passwords(
        Application.data.compose_config, [service])

    if service == REGISTRY and not Configuration.swarm_mode:
        print_and_exit("Can't start the registry in compose mode")

    docker = Docker()
    if Configuration.swarm_mode:
        if service != REGISTRY:
            docker.registry.ping()
        else:

            if docker.registry.ping(do_exit=False):
                registry = docker.registry.get_host()
                print_and_exit("The registry is already running at {}",
                               registry)

            if docker.client.container.exists("registry"):
                log.debug(
                    "The registry container is already existing, removing")
                docker.client.container.remove("registry", force=True)

    if not debug:
        if user:
            print_and_exit("Can't specify a user if debug mode is OFF")
        if command:
            print_and_exit("Can't specify a command if debug mode is OFF")

    if user:
        log.warning(
            "Please remember that users in volatile containers are not mapped on"
            " current uid and gid. You should not write or modify files on volumes"
            " to prevent permissions errors")

    if pull:
        log.info("Pulling image for {}...", service)
        docker.client.compose.pull([service])
    else:
        verify_available_images(
            [service],
            Application.data.compose_config,
            Application.data.base_services,
            is_run_command=True,
        )

    # This is equivalent to the old volatile command
    if debug:
        if not command:
            command = "bash"

        log.info("Starting {}...", service)
        docker.compose.create_volatile_container(
            service,
            command=command,
            user=user,
            # if None the wrapper will automatically switch the default ones
            # How to prevent ports on volatile containers?
            # publish=None,
        )
        log.info("Service {} removed", service)

        return None

    # This is equivalent to the old registry command
    if service == REGISTRY:
        # @ symbol in secrets is not working
        # https://github.com/bitnami/charts/issues/1954
        # Other symbols like # and " also lead to configuration errors
        os.environ["REGISTRY_HTTP_SECRET"] = password(
            param_not_used="", length=96
            # , symbols="%*,-.=?[]^_~"
        )

    publish_ports = get_publish_ports(service, first_port)

    if detach is None:
        if service == "swaggerui" or service == "adminer":
            detach = False
        else:
            detach = True

    log.info("Running {}...", service)

    if service == "swaggerui":
        if Configuration.production:
            prot = "https"
        else:
            prot = "http"

        port = publish_ports[0][0] if publish_ports else first_port
        log.info(
            "You can access SwaggerUI web page here: {}\n",
            f"{prot}://{Configuration.hostname}:{port}",
        )

    if service == "adminer":
        if Configuration.production:
            prot = "https"
        else:
            prot = "http"

        port = publish_ports[0][0] if publish_ports else first_port
        log.info(
            "You can access Adminer interface on: {}\n",
            f"{prot}://{Configuration.hostname}:{port}",
        )

    docker.compose.create_volatile_container(service,
                                             detach=detach,
                                             publish=publish_ports)
Exemplo n.º 22
0
    def get_compose_configuration(
        self,
        enabled_services: Optional[Iterable[str]] = None
    ) -> Tuple[ComposeServices, ComposeServices]:
        """Collect the compose yml files and read the merged configuration.

        Files are appended in override order (later files take precedence):
        core confs (backend, angular, swarm/NFS/volume options, production
        or development options), then the extended project confs (if any),
        then the custom project confs.

        :param enabled_services: services requested by the caller; falls back
            to Configuration.services_list and then to all active services
        :return: (base_services, compose_config) — services parsed from the
            base files only, and from the full file list respectively
        """

        compose_files: List[Path] = []

        # Per-stack configuration file name, e.g. "<stack>.yml"
        MODE = f"{Configuration.stack}.yml"
        customconf = Configuration.ABS_PROJECT_PATH.joinpath(
            CONTAINERS_YAML_DIRNAME)
        angular_loaded = False

        def add(p: Path, f: str) -> None:
            # Shorthand: queue p/f for merging
            compose_files.append(p.joinpath(f))

        if Configuration.load_backend:
            add(CONFS_DIR, "backend.yml")

        if Configuration.load_frontend:
            if Configuration.frontend == ANGULAR:
                add(CONFS_DIR, "angular.yml")
                angular_loaded = True
                # Extra angular options for swarm+production, unless the
                # compose engine is forced
                if (Configuration.swarm_mode and Configuration.production
                        and not Configuration.FORCE_COMPOSE_ENGINE):
                    add(CONFS_DIR, "swarm_angular_prod_options.yml")

        if Configuration.swarm_mode and not Configuration.FORCE_COMPOSE_ENGINE:
            add(CONFS_DIR, "swarm_options.yml")

        # Volumes: NFS-backed when an NFS_HOST is configured, local otherwise
        if Application.env.get("NFS_HOST"):
            log.info("NFS Server is enabled")
            add(CONFS_DIR, "volumes_nfs.yml")
        else:
            add(CONFS_DIR, "volumes_local.yml")

        if Configuration.production:
            add(CONFS_DIR, "production.yml")
        else:
            add(CONFS_DIR, "development.yml")

            if angular_loaded:
                add(CONFS_DIR, "angular-development.yml")

        if self.extended_project and self.extended_project_path:
            extendedconf = self.extended_project_path.joinpath(
                CONTAINERS_YAML_DIRNAME)
            # Only added if exists, this is the only non mandatory conf file
            extended_mode_conf = extendedconf.joinpath(MODE)
            if extended_mode_conf.exists():
                compose_files.append(extended_mode_conf)

            if Configuration.load_commons:
                add(extendedconf, "commons.yml")

        if Configuration.load_commons:
            add(customconf, "commons.yml")

        add(customconf, MODE)

        # Read necessary files
        self.files, self.base_files = configuration.read_composer_yamls(
            compose_files)
        # to build the config with files and variables

        # Local import — NOTE(review): presumably to avoid a circular import
        # at module load time; confirm
        from controller.deploy.docker import Docker

        # Parse the configuration twice: base files only, then all files
        docker = Docker(compose_files=self.base_files,
                        verify_swarm=not Configuration.initialize)
        base_services = docker.compose.get_config().services

        docker = Docker(compose_files=self.files,
                        verify_swarm=not Configuration.initialize)
        compose_config = docker.compose.get_config().services

        self.active_services = services.find_active(compose_config)

        self.enabled_services = services.get_services(
            Configuration.services_list or enabled_services,
            default=self.active_services,
        )

        # Every explicitly requested service must be among the active ones
        for service in self.enabled_services:
            if service not in self.active_services:
                print_and_exit("No such service: {}", service)

        log.debug("Enabled services: {}", ", ".join(self.enabled_services))

        self.create_datafile(list(compose_config.keys()), self.active_services)

        return base_services, compose_config
Exemplo n.º 23
0
def build(
    services: List[str] = typer.Argument(
        None,
        help="Services to be built",
        shell_complete=Application.autocomplete_service,
    ),
    core: bool = typer.Option(
        False,
        "--core",
        help="Include core images to the build list",
        show_default=False,
    ),
    force: bool = typer.Option(
        False,
        "--force",
        "-f",
        help="remove the cache to force the build",
        show_default=False,
    ),
) -> bool:
    """Build images for the requested services via docker buildx bake.

    With --core the core (template) images are rebuilt first from the base
    compose files only. Requires docker buildx to be installed.

    NOTE(review): declared to return bool but no return statement is visible
    in this snippet — the body appears truncated here; confirm against the
    complete file.
    """
    Application.print_command(
        Application.serialize_parameter("--core", core, IF=core),
        Application.serialize_parameter("--force", force, IF=force),
        Application.serialize_parameter("", services),
    )

    Application.get_controller().controller_init(services)

    docker = Docker()

    # buildx is a hard requirement: exit with install instructions otherwise
    if docker.client.buildx.is_installed():
        v = docker.client.buildx.version()
        log.debug("docker buildx is installed: {}", v)
    else:  # pragma: no cover
        print_and_exit(
            "A mandatory dependency is missing: docker buildx not found"
            "\nInstallation guide: https://github.com/docker/buildx#binary-release"
            "\nor try the automated installation with {command}",
            command=RED("rapydo install buildx"),
        )

    # In swarm mode the build interacts with the local registry
    if Configuration.swarm_mode:
        docker.registry.ping()
        docker.registry.login()

    images: Set[str] = set()
    if core:
        log.debug("Forcing rebuild of core builds")
        # Create merged compose file with core files only
        docker = Docker(compose_files=Application.data.base_files)
        docker.compose.dump_config(Application.data.services,
                                   set_registry=False)
        log.debug("Compose configuration dumped on {}", COMPOSE_FILE)

        docker.client.buildx.bake(
            targets=Application.data.services,
            files=[COMPOSE_FILE],
            pull=True,
            load=True,
            cache=not force,
        )
        log.info("Core images built")
        if Configuration.swarm_mode:
            log.warning(
                "Local registry push is not implemented yet for core images")

    # Back to the full configuration (core + custom files)
    docker = Docker()
    docker.compose.dump_config(Application.data.services, set_registry=False)
    log.debug("Compose configuration dumped on {}", COMPOSE_FILE)

    core_builds = find_templates_build(Application.data.base_services)
    all_builds = find_templates_build(Application.data.compose_config)

    # Collect images defined outside the core templates (custom builds)
    services_with_custom_builds: List[str] = []
    for image, build in all_builds.items():
        # NOTE: the loop variable 'build' shadows this function's name
        if image not in core_builds:

            # this is used to validate the target Dockerfile:
            if p := build.get("path"):
                get_dockerfile_base_image(p, core_builds)
            services_with_custom_builds.extend(build["services"])
            images.add(image)
Exemplo n.º 24
0
    def load_frontend_scaffold(self, frontend: Optional[str]) -> bool:
        """Register the folders and files expected for the chosen frontend.

        Populates the scaffold bookkeeping lists (expected/raw/obsolete files
        and data folders) for the Angular framework.

        Returns True when a frontend framework is enabled, False otherwise.
        """
        self.frontend = frontend

        # Nothing to scaffold when no frontend framework is configured
        if self.frontend is None or self.frontend == NO_FRONTEND:
            log.debug("No frontend framework enabled")
            return False

        self.expected_folders.append(self.p_path("frontend"))

        if self.frontend == ANGULAR:

            def fe(*parts):
                # Shorthand for paths under the project frontend folder
                return self.p_path("frontend", *parts)

            frontend_data_dir = DATA_DIR.joinpath(self.project, "frontend")

            for sub in ("app", "styles", "integration", "assets"):
                self.expected_folders.append(fe(sub))
            self.expected_folders.append(fe("assets", "favicon"))

            self.suggested_gitkeep.append(frontend_data_dir.joinpath(GITKEEP))
            self.suggested_gitkeep.append(fe("integration", GITKEEP))

            self.expected_files.append(fe("package.json"))
            self.expected_files.append(fe("styles", "style.scss"))
            self.expected_files.append(fe("styles", "variables.scss"))
            app_files = (
                "customization.ts",
                "custom.module.ts",
                "custom.navbar.ts",
                "custom.footer.ts",
                "custom.profile.ts",
                "custom.navbar.links.html",
                "custom.navbar.brand.html",
                "custom.footer.html",
                "custom.profile.html",
                "types.ts",
            )
            self.expected_files.extend(fe("app", f) for f in app_files)

            # Generated with https://realfavicongenerator.net
            favicon_assets = (
                "android-chrome-192x192.png",
                "browserconfig.xml",
                "favicon-32x32.png",
                "mstile-150x150.png",
                "safari-pinned-tab.svg",
                "apple-touch-icon.png",
                "favicon-16x16.png",
                "favicon.ico",
                "site.webmanifest",
            )
            self.raw_files.extend(
                fe("assets", "favicon", f) for f in favicon_assets
            )

            self.data_folders.extend(
                [
                    frontend_data_dir,
                    frontend_data_dir.joinpath("app"),
                    frontend_data_dir.joinpath("node_modules"),
                    DATA_DIR.joinpath(self.project, "karma"),
                    DATA_DIR.joinpath(self.project, "cypress"),
                ]
            )

            for conf in (
                "angular.json",
                "karma.conf.js",
                "package.json",
                "polyfills.ts",
                "tsconfig.json",
                "tsconfig.app.json",
                "tsconfig.spec.json",
                "tsconfig.server.json",
                "cypress.json",
            ):
                self.data_files.append(frontend_data_dir.joinpath(conf))

            obsolete_app_files = (
                "app.routes.ts",
                "app.declarations.ts",
                "app.providers.ts",
                "app.imports.ts",
                "app.custom.navbar.ts",
                "app.custom.navbar.html",
                "app.entryComponents.ts",
                "app.home.ts",
                "app.home.html",
                "custom.declarations.ts",
                "custom.routes.ts",
                "custom.project.options.ts",
            )
            self.obsolete_files.extend(
                fe("app", f) for f in obsolete_app_files
            )
            # Removed since 1.0
            self.obsolete_files.append(frontend_data_dir.joinpath("browserslist"))
            # Removed since 1.2 (replaced with scss in styles)
            self.obsolete_files.append(fe("css"))

        return True
Exemplo n.º 25
0
    def dump_config(
        self,
        services: List[str],
        set_registry: bool = True,
        v1_compatibility: bool = False,
    ) -> None:
        """Dump a cleaned compose configuration for *services* to COMPOSE_FILE.

        Services not listed in *services* are dropped, together with the
        networks and volumes they alone referenced; service images are
        prefixed with the local registry host when running in swarm mode.

        :param services: names of the services to keep in the dump
        :param set_registry: prefix images with the registry host (swarm only)
        :param v1_compatibility: strip options unsupported by compose v1
            (currently the 'create_host_path' bind option)
        """

        compose_config = self.get_config_json()

        # Skeleton of the dumped file: only referenced entries are copied in
        clean_config: Dict[str, Any] = {
            "version": compose_config.get("version", COMPOSE_FILE_VERSION),
            "networks": {},
            "volumes": {},
            "services": {},
        }
        networks = set()
        volumes = set()
        binds: Set[Path] = set()

        registry = self.docker_wrapper.registry.get_host()
        # Remove unused services, networks and volumes from compose configuration
        for key, value in compose_config.get("services", {}).items():
            if key not in services:
                continue

            # The registry service itself is never pulled from the registry
            if Configuration.swarm_mode and set_registry and key != REGISTRY:
                value["image"] = f"{registry}/{value['image']}"

            if "healthcheck" in value and "test" in value["healthcheck"]:
                # healtcheck commands can contain env variables double-escaped ($$)
                # When dumped to docker-compose.yml the double escape is removed
                # and when started the single escaped variable is not resolved
                # and breaks the command. Let's double all the $ to restore the
                # expected behavior and counteract the consumed $
                value["healthcheck"]["test"] = [
                    t.replace("$", "$$") for t in value["healthcheck"]["test"]
                ]

            for k, v in value.get("environment", {}).items():
                # Empty variables are converted to None...
                # and None variables are not passed to the container
                # This check can be removed when will be no longer covered
                if v is None:
                    value["environment"][k] = ""

            # Ports are forced to be int to prevent failures with compose
            for idx, port in enumerate(value.get("ports", [])):
                target_port = system.to_int(port["target"])
                published_port = system.to_int(port["published"])

                if target_port is None or published_port is None:
                    print_and_exit(  # pragma: no cover
                        "Can't convert service ports to integers: {}-{}",
                        port["target"],
                        port["published"],
                    )
                port["target"] = target_port
                port["published"] = published_port
                value["ports"][idx] = port
            clean_config["services"][key] = value

            # Track which networks / volumes / bind mounts are referenced
            for k in value.get("networks", {}).keys():
                networks.add(k)

            for k in value.get("volumes", []):
                source = k.get("source", "")
                volume_type = k.get("type", "")
                if source and volume_type == "volume":
                    volumes.add(source.split(":")[0])
                elif source and volume_type == "bind":

                    # Remove unsupported option: 'create_host_path'
                    if v1_compatibility:
                        k.get("bind", {}).pop("create_host_path", None)

                    binds.add(Path(source.split(":")[0]))

            # Remove replicas if both replicas and global mode are set
            if "deploy" in value:  # pragma: no cover
                if "replicas" in value["deploy"] and "mode" in value["deploy"]:
                    if value["deploy"]["mode"] == "global":
                        value["deploy"].pop("replicas")

        # Missing folders are then automatically created by the docker engine
        # the runs with root privileges and so create folders as root
        # and this can often lead to issues with permissions.

        # Pre-create missing bind folders so they are not created as root
        for b in binds:
            if not b.exists():
                self.create_local_path(b, "bind folder")

        for net in networks:
            clean_config["networks"][net] = compose_config["networks"].get(net)

        for vol in volumes:
            # NOTE(review): volume_config can be None when a service
            # references a volume missing from the top-level volumes —
            # the "in" test below would then raise; confirm upstream
            # guarantees every referenced volume is declared
            volume_config = compose_config["volumes"].get(vol)
            if "driver_opts" in volume_config:
                device_type = volume_config["driver_opts"].get("type", "local")
                device = volume_config["driver_opts"].get("device", "")

                if device_type == "nfs" and device:
                    # starting from py39
                    # device = device.removeprefix(":")
                    if device.startswith(":"):
                        device = device[1:]
                    d = Path(device)
                    if not d.exists():
                        self.create_local_path(d, "volume path")

            clean_config["volumes"][vol] = volume_config

        with open(COMPOSE_FILE, "w") as fh:
            fh.write(yaml.dump(clean_config, default_flow_style=False))

        log.debug("Compose configuration dumped on {}", COMPOSE_FILE)
Exemplo n.º 26
0
def init(
    create_projectrc: bool = typer.Option(
        False,
        "--force",
        "-f",
        help="Overwrite initialization files if already exist",
        show_default=False,
    ),
    submodules_path: Path = typer.Option(
        None,
        "--submodules-path",
        help=
        "Link all submodules in an existing folder instead of download them",
    ),
) -> None:
    """Initialize the current project.

    Creates the expected data folders/files, writes the .projectrc (twice:
    once with the main options and again once service variables are known),
    downloads or links the git submodules, initializes the swarm when in
    swarm mode and removes a leftover Angular yarn.lock if present.
    """

    Application.print_command(
        Application.serialize_parameter("--force",
                                        create_projectrc,
                                        IF=create_projectrc),
        Application.serialize_parameter("--submodules-path",
                                        submodules_path,
                                        IF=submodules_path),
    )
    Application.get_controller().controller_init()

    # Make sure every expected data folder and file exists
    for p in Application.project_scaffold.data_folders:
        if not p.exists():
            p.mkdir(parents=True, exist_ok=True)

    for p in Application.project_scaffold.data_files:
        if not p.exists():
            p.touch()

    # First run (no .projectrc and no host configuration): force its creation
    if not Configuration.projectrc and not Configuration.host_configuration:
        create_projectrc = True

    # We have to create the .projectrc twice
    # One generic here with main options and another after the complete
    # conf reading to set services variables
    if create_projectrc:
        Application.get_controller().create_projectrc()
        Application.get_controller().read_specs(read_extended=False)

    if submodules_path is not None:
        if not submodules_path.exists():
            print_and_exit("Local path not found: {}", submodules_path)

    Application.git_submodules(from_path=submodules_path)

    Application.get_controller().read_specs(read_extended=True)
    Application.get_controller().make_env()

    # Compose services and variables
    Application.get_controller().get_compose_configuration()
    # We have to create the .projectrc twice
    # One generic with main options and another here
    # when services are available to set specific configurations
    if create_projectrc:
        Application.get_controller().create_projectrc()
        Application.get_controller().read_specs(read_extended=True)
        Application.get_controller().make_env()

    if Configuration.swarm_mode:
        docker = Docker(verify_swarm=False)
        if not docker.swarm.get_token():
            docker.swarm.init()
            log.info("Swarm is now initialized")
        else:
            log.debug("Swarm is already initialized")

    # NOTE(review): yarn.lock is deleted on init — presumably to force a
    # fresh dependency resolution on the next frontend build; confirm
    if Configuration.frontend == ANGULAR:
        yarn_lock = DATA_DIR.joinpath(Configuration.project, "frontend",
                                      "yarn.lock")
        if yarn_lock.exists():
            yarn_lock.unlink()
            log.info("Yarn lock file deleted")

    log.info("Project initialized")
Exemplo n.º 27
0
def check_updates(path, gitobj, fetch_remote='origin', remote_branch=None):
    """Check whether a git repository is behind or ahead of its remote.

    Fetches from the remote, then logs (up to 20) commits to be pulled and,
    when comparing against the same-named remote branch, commits to be
    pushed.

    :param path: label of the repository, used in log messages
    :param gitobj: GitPython Repo object
    :param fetch_remote: remote name to fetch from (default 'origin')
    :param remote_branch: remote branch to compare with; defaults to the
        currently active local branch
    :return: False when the repo is detached (updates can't be verified),
        True otherwise
    """

    fetch(path, gitobj, fetch_remote)

    branch = get_active_branch(gitobj)
    if branch is None:
        log.warning("{} repo is detached? Unable to verify updates!", path)
        return False

    if remote_branch is None:
        remote_branch = branch

    # Only inspect the most recent commits on each side
    max_remote = 20
    log.verbose("Inspecting {}/{}", path, branch)

    def _describe(commit):
        # One-line representation: short sha + message truncated to 60 chars
        message = commit.message.strip().replace('\n', "")
        if len(message) > 60:
            message = message[0:57] + "..."
        return commit.hexsha[0:7], message

    # CHECKING COMMITS BEHIND (TO BE PULLED) #
    behind_check = "{}..{}/{}".format(branch, fetch_remote, remote_branch)
    commits_behind = gitobj.iter_commits(behind_check, max_count=max_remote)

    try:
        commits_behind_list = list(commits_behind)
    except GitCommandError:
        log.info(
            "Remote branch {} not found for {} repo. Is it a local branch?",
            branch, path)
    else:

        if commits_behind_list:
            log.warning("{} repo should be updated!", path)
        else:
            log.debug("{} repo is updated", path)
        for c in commits_behind_list:
            sha, message = _describe(c)
            log.warning("Missing commit from {}: {} ({})", path, sha, message)

    # CHECKING COMMITS AHEAD (TO BE PUSHED) #
    # Only meaningful when comparing against the same-named remote branch
    if remote_branch == branch:
        ahead_check = "{}/{}..{}".format(fetch_remote, remote_branch, branch)
        commits_ahead = gitobj.iter_commits(ahead_check, max_count=max_remote)
        try:
            commits_ahead_list = list(commits_ahead)
        except GitCommandError:
            log.info(
                "Remote branch {} not found for {}. Is it a local branch?",
                branch, path)
        else:

            if commits_ahead_list:
                log.warning("You have commits not pushed on {} repo", path)
            else:
                log.debug("You pushed all commits on {} repo", path)
            for c in commits_ahead_list:
                sha, message = _describe(c)
                log.warning("Unpushed commit in {}: {} ({})", path, sha,
                            message)

    return True
Exemplo n.º 28
0
def reload(
    services: List[str] = typer.Argument(
        None,
        help="Services to be reloaded",
        shell_complete=Application.autocomplete_service,
    ),
) -> None:
    """Reload running services by invoking their /usr/local/bin/reload script.

    The frontend is a special case in production mode: it is only reloaded
    when explicitly requested, and always through the compose engine.
    Services lacking a reload script are skipped with a warning.
    """

    Application.print_command(Application.serialize_parameter("", services))

    Application.get_controller().controller_init(services)

    # Typer passes None when no argument is given on the command line:
    # normalize to a list so the membership tests below can't raise TypeError
    services = services or []

    docker = Docker()
    running_services = docker.get_running_services()

    if "frontend" in services and len(services) > 1:
        print_and_exit("Can't reload frontend and other services at once")

    reloaded = 0
    for service in Application.data.services:

        # Special case: frontend in production mode
        if Configuration.production and service == "frontend":
            # Only consider it if explicitly requested in input
            if "frontend" not in services:
                log.debug(
                    "Can't reload the frontend if not explicitly requested")
            else:
                log.info("Reloading frontend...")
                # The frontend build gets stuck in swarm mode... always start
                # the container in compose mode when using the reload command
                Configuration.FORCE_COMPOSE_ENGINE = True
                Application.get_controller().controller_init([service])
                docker = Docker()
                docker.compose.start_containers([service], force=True)
                reloaded += 1
            continue

        if service not in running_services:
            continue

        containers = docker.get_containers(service)
        if not containers:
            log.warning("Can't find any container for {}", service)
            continue

        try:
            # get the first container from the containers dict
            container = containers.get(list(containers.keys())[0])

            # Just added for typing purpose
            if not container:  # pragma: no cover
                log.warning("Can't find any container for {}", service)
                continue

            # Probe for the reload script before invoking it
            output = docker.exec_command(
                container,
                user="******",
                command="ls /usr/local/bin/reload",
                force_output_return=True,
            )

            # this is to consume the iterator and raise the exception with exit code
            if output:
                [_ for _ in output]

        except DockerException as e:
            # fail2ban fails with code 1, the backend with code 2: in both
            # cases the reload script is not available for the service
            if ("It returned with code 1" in str(e)
                    or "It returned with code 2" in str(e)):
                log.warning("Service {} does not support the reload command",
                            service)
                continue
            raise

        docker.exec_command(containers,
                            user="******",
                            command="/usr/local/bin/reload")
        reloaded += 1

    if reloaded == 0:
        log.info("No service reloaded")
    else:
        log.info("Services reloaded")
Exemplo n.º 29
0
def shell(
    service: str = typer.Argument(
        ...,
        help="Service name",
        shell_complete=Application.autocomplete_service),
    command: str = typer.Argument(
        "bash",
        help="UNIX command to be executed on selected running service"),
    user: Optional[str] = typer.Option(
        None,
        "--user",
        "-u",
        help="User existing in selected service",
        show_default=False,
    ),
    default_command: bool = typer.Option(
        False,
        "--default-command",
        "--default",
        help="Execute the default command configured for the container",
        show_default=False,
    ),
    no_tty: bool = typer.Option(
        False,
        "--no-tty",
        help=
        "Disable pseudo-tty allocation (useful for non-interactive script)",
        show_default=False,
    ),
    replica: int = typer.Option(
        1,
        "--replica",
        "--slot",
        help="Execute the command on the specified replica",
        show_default=False,
    ),
    broadcast: bool = typer.Option(
        False,
        "--broadcast",
        help="Execute the command on all the replicas",
        show_default=False,
    ),
) -> None:
    """Execute a command (bash by default) inside a running container.

    The command can target a specific replica (--replica) or every replica
    of the service (--broadcast); the two options are mutually exclusive.
    """

    # Echo the equivalent command line before doing anything
    printable = [
        Application.serialize_parameter("--user", user, IF=user),
        Application.serialize_parameter(
            "--default", default_command, IF=default_command),
        Application.serialize_parameter("", service),
        Application.serialize_parameter("", command),
    ]
    Application.print_command(*printable)

    if no_tty:
        log.warning("--no-tty option is deprecated, you can stop using it")

    if replica > 1 and broadcast:
        print_and_exit("--replica and --broadcast options are not compatible")

    Application.get_controller().controller_init()

    docker = Docker()

    # Fall back to the service's configured defaults when not provided
    user = user or services.get_default_user(service)
    if default_command:
        command = services.get_default_command(service)

    log.debug("Requested command: {} with user: {}", command, user
              or "default")

    if broadcast:
        # Run on every replica of the service
        targets = docker.get_containers(service)
        if not targets:
            print_and_exit("No running container found for {} service",
                           service)
        docker.exec_command(targets, user=user, command=command)
        return

    # Run on a single replica (slot 1 by default)
    target = docker.get_container(service, slot=replica)
    if not target:
        if replica != 1:
            print_and_exit("Replica number {} not found for {} service",
                           str(replica), service)
        print_and_exit("No running container found for {} service", service)

    docker.exec_command(target, user=user, command=command)
Exemplo n.º 30
0
def check_updates(path: str, gitobj: Repo) -> None:
    """Report unpulled and unpushed commits for a repository.

    Fetches the repo, then compares the active branch against its remote
    counterpart (origin/<branch>) in both directions, logging commits
    behind (to be pulled) and commits ahead (to be pushed).

    All reporting happens through the logger; the function always
    returns None.
    """

    fetch(path, gitobj)

    branch = get_active_branch(gitobj)
    if branch is None:  # pragma: no cover
        # Detached HEAD (or similar): no branch to compare against origin
        log.warning("Is {} repo detached? Unable to verify updates", path)
        return None

    def _summary(commit):
        # One-line commit description: 7-char sha + message collapsed to a
        # single line and capped at 60 characters.
        message = str(commit.message).strip().replace("\n", "")
        if len(message) > 60:
            message = message[0:57] + "..."
        return commit.hexsha[0:7], message

    # CHECKING COMMITS BEHIND (TO BE PULLED) #
    commits_behind = gitobj.iter_commits(
        f"{branch}..origin/{branch}", max_count=MAX_FETCHED_COMMITS
    )

    try:
        commits_behind_list = list(commits_behind)
    except GitCommandError:  # pragma: no cover
        # origin/<branch> missing => local-only branch, nothing to compare
        log.info(
            "Remote branch {} not found for {} repo. Is it a local branch?",
            branch,
            path,
        )
    else:

        if not commits_behind_list:
            log.debug("{} repo is updated", path)
        else:  # pragma: no cover
            log.warning("{} repo should be updated!", path)
            for c in commits_behind_list:
                sha, message = _summary(c)

                # Messages starting with '#' are intentionally not reported
                if message.startswith("#"):
                    continue

                log.warning("... missing commit from {}: {} ({})", path, sha, message)

    # CHECKING COMMITS AHEAD (TO BE PUSHED) #
    commits_ahead = gitobj.iter_commits(
        f"origin/{branch}..{branch}", max_count=MAX_FETCHED_COMMITS
    )
    try:
        commits_ahead_list = list(commits_ahead)
    except GitCommandError:  # pragma: no cover
        log.info(
            "Remote branch {} not found for {}. Is it a local branch?", branch, path
        )
    else:

        if commits_ahead_list:
            log.warning("You have commits not pushed on {} repo", path)
        else:
            log.debug("You pushed all commits on {} repo", path)
        for c in commits_ahead_list:
            sha, message = _summary(c)
            log.warning("Unpushed commit in {}: {} ({})", path, sha, message)