def find_active(services):
    """Return (all_services, active_services) for the current mode.

    A service is base-active when its environment.ACTIVATE value equals 1;
    the final active set is expanded through each service's 'depends_on'
    entries via walk_services.
    """
    all_services = {}
    dependencies = {}
    base_actives = []

    for service in services:
        service_name = service.get('name')
        all_services[service_name] = service
        dependencies[service_name] = list(service.get('depends_on', {}).keys())
        # environment.ACTIVATE may be int or str; normalize before comparing
        if str(glom(service, "environment.ACTIVATE", default=0)) == "1":
            base_actives.append(service_name)

    log.verbose("Base active services = {}", base_actives)
    log.verbose("Services dependencies = {}", dependencies)
    active_services = walk_services(base_actives, dependencies)
    return all_services, active_services
def get_yaml_path(file, path):
    """Join path and file; return the full path if it exists, else None."""
    target = os.path.join(path, file)
    log.verbose("Reading file {}", target)
    if os.path.exists(target):
        return target
    return None
def fetch(path, gitobj, fetch_remote='origin'):
    """Fetch the matching remote of a git repository; exit on git errors."""
    for remote in gitobj.remotes:
        if remote.name == fetch_remote:
            log.verbose("Fetching {} on {}", remote, path)
            try:
                remote.fetch()
            except GitCommandError as e:
                log.exit(str(e))
        else:
            log.verbose("Skipping fetch of remote {} on {}", remote, path)
def __init__(self, files):
    """Store the compose file list and derive the project name from CWD."""
    super(Compose, self).__init__()
    self.files = files
    self.options = {'--file': self.files}
    self.project_dir = os.curdir
    self.project_name = get_project_name(self.project_dir)
    log.verbose("Client compose {}: {}", self.project_name, files)
def remove_redundant_services(services, builds):
    """Collapse requested services sharing the same build into one service.

    Services not built from a rapydo image are never redundant and are kept
    as-is. Among services sharing the same build, name_priority picks the
    single representative to keep.

    Parameters:
        services: iterable of requested service names
        builds: mapping build-name -> {'services': [names], ...} (ALL builds)
    Returns:
        list of non-redundant service names
    """
    non_redundant_services = []

    # Invert builds:
    #   {'build-name': {'services': ['A', 'B'], ...}} -> {'A': 'build-name', 'B': 'build-name'}
    flat_builds = {}
    for build_name, build in builds.items():
        for srv in build['services']:
            flat_builds[srv] = build_name

    # Group the requested services by the build they come from
    requested_builds = {}
    for service in services:
        build_name = flat_builds.get(service)
        if build_name is None:
            # Not built from a rapydo image: consider it non-redundant
            non_redundant_services.append(service)
            continue
        requested_builds.setdefault(build_name, []).append(service)

    # Keep a single representative per build (every group has >= 1 element
    # by construction, so no emptiness checks are needed)
    for redundant_services in requested_builds.values():
        winner = redundant_services[0]
        for serv in redundant_services[1:]:
            winner = name_priority(winner, serv)
        non_redundant_services.append(winner)

    # BUGFIX: corrected "redudant" typo in the log message
    log.verbose("Removed redundant services from {} -> {}",
                services, non_redundant_services)
    return non_redundant_services
def command(self, command, options=None, nofailure=False):
    """Invoke a docker-compose command by name and translate its failures.

    Looks up `command` as a method on the compose handler, fills in default
    options, and maps the various compose/docker exceptions onto log.exit
    (or re-raises as AttributeError when nofailure is set).

    Parameters:
        command: name of the compose method to call (e.g. 'up', 'build')
        options: dict of CLI-style options; a missing 'SERVICE' list is
            defaulted to [] (empty mutable default is created fresh per call)
        nofailure: when True, user/operation/build errors are re-raised as
            AttributeError instead of terminating via log.exit
    Returns:
        whatever the compose method returns, or None if it raised
    """
    # NOTE: debug defaults
    # tmp = self.get_defaults(command)
    # print("TEST", tmp, type(tmp))
    # # exit(1)
    compose_handler = self.get_handle()
    method = getattr(compose_handler, command)

    if options is None:
        options = {}

    if options.get('SERVICE', None) is None:
        options['SERVICE'] = []

    log.debug("{}'{}'", compose_log, command)

    out = None

    # sometimes this import stucks... importing here to avoid unnecessary waits
    from docker.errors import APIError
    try:
        out = method(options=options)
    except SystemExit as e:
        # NOTE: we check the status here.
        # System exit is received also when a normal command finished.
        # code == 0 means a clean exit; negative codes are unexpected and
        # only warned about, positive codes are propagated via log.exit.
        if e.code < 0:
            log.warning("Invalid code returned: {}", e.code)
        elif e.code > 0:
            log.exit("Compose received: system.exit({})", e.code, error_code=e.code)
        else:
            log.verbose("Executed compose {} w/{}", command, options)
    except (clierrors.UserError, cerrors.OperationFailedError, BuildError) as e:
        msg = "Failed command execution:\n{}".format(e)
        if nofailure:
            # caller opted to handle the failure itself
            raise AttributeError(msg)
        else:
            log.exit(msg)
    except APIError as e:
        log.exit("Failed docker container:\n{}", e)
    except (ProjectError, NoSuchService) as e:
        log.exit(str(e))
    else:
        # success path: nothing was raised
        log.verbose("Executed compose {} w/{}", command, options)

    return out
def set_defaults(variables, merge=None):
    """Map each variable name to a None-valued CLI-style option key.

    Single-character names are prefixed with '-', all-uppercase names are
    kept verbatim, everything else is prefixed with '--'. When merge is
    given, keys are added to that dict in place; otherwise a new dict is
    returned.
    """
    options = merge if merge is not None else {}

    for variable in variables:
        if len(variable) == 1:
            prefix = '-'
        elif variable == variable.upper():
            prefix = ''
        else:
            prefix = '--'
        options[prefix + variable] = None

    log.verbose('defaults: {}', options)
    return options
def read_composer_yamls(composers):
    """Load every enabled composer yaml and collect its file path.

    Parameters:
        composers: mapping name -> composer dict with keys 'if', 'mandatory',
            'base', 'file', 'path' ('if', 'mandatory' and 'base' are popped)
    Returns:
        (all_files, base_files): paths of all loaded composers, and the
        subset flagged as base composers
    """
    base_files = []
    all_files = []

    # YAML CHECK UP
    for name, composer in composers.items():
        # skip composers that are not enabled
        if not composer.pop('if', False):
            continue

        log.verbose("Composer {}", name)

        mandatory = composer.pop('mandatory', False)
        base = composer.pop('base', False)

        f = composer.get('file')
        p = composer.get('path')
        try:
            compose = load_yaml_file(file=f, path=p, is_optional=not mandatory)

            # falsy covers both a missing/None 'services' key and an empty map
            if not compose.get('services'):
                log.verbose("No service defined in {}, skipping", name)
                continue

            filepath = get_yaml_path(file=f, path=p)
            all_files.append(filepath)
            if base:
                base_files.append(filepath)

        except KeyError as e:
            # BUGFIX: 'filepath' was previously referenced here, but it is
            # only assigned after a successful load, so a KeyError raised by
            # load_yaml_file caused an UnboundLocalError that masked the
            # real problem. Build the path explicitly for the message.
            log.exit("Error loading {}: {}", os.path.join(p, f), e)

    return all_files, base_files
def main():
    """Controller entry point: parse CLI arguments and run the Application."""
    pretty_errors  # pylint:disable=pointless-statement

    try:
        # Imported lazily so a Ctrl-C during startup is still caught below
        from controller.arguments import ArgParser
        parsed = ArgParser()

        from controller.app import Application
        Application(parsed)
    except KeyboardInterrupt:
        log.info("Interrupted by the user")
    except NotImplementedError as e:
        print('NOT IMPLEMENTED (yet): {}'.format(e))
    else:
        log.verbose("Application completed")
def build_images(
    self,
    builds,
    current_version,
    current_uid,
    current_gid,
    force_pull=True,
    no_cache=False,
):
    """Build every requested image through the compose handler.

    Parameters:
        builds: mapping image-name -> build info dict with a 'service' key
        current_version: forwarded as the RAPYDO_VERSION build argument
        current_uid / current_gid: forwarded as CURRENT_UID / CURRENT_GID
        force_pull: always attempt to pull newer base images
        no_cache: disable the docker build cache
    """
    try:
        compose_handler = self.get_handle()

        for image, build in builds.items():
            service = build.get('service')
            log.verbose("Building image: {}", image)

            # BUGFIX (clarity): '--build-arg' takes a LIST of KEY=VALUE
            # strings, so multiple variables are supported; the old comment
            # claiming "only 1 variable" was wrong, and the len() > 0 guard
            # was always true for this non-empty literal.
            build_args = [
                "RAPYDO_VERSION={}".format(current_version),
                "CURRENT_UID={}".format(current_uid),
                "CURRENT_GID={}".format(current_gid),
            ]

            options = {
                '--no-cache': no_cache,
                '--parallel': True,
                '--pull': force_pull,
                '--force-rm': True,
                'SERVICE': [service],
                '--build-arg': build_args,
            }

            compose_handler.build(options=options)
            log.info("Built image: {}", image)
        return
    except SystemExit:
        # compose may abort via sys.exit; treat it as a soft failure
        log.info("SystemExit during template building")
def find_templates_override(services, templates):
    """Detect template builds overridden by vanilla (project) builds.

    For each service with a 'build' section, parse its Dockerfile; when the
    FROM image is a rapydo/ template known to this project, record both the
    template build and the vanilla-image -> template-image mapping.

    Returns:
        (tbuilds, vbuilds): template builds involved in an override, and a
        map from each vanilla image to its template base image.
    """
    tbuilds = {}
    vbuilds = {}

    for service in services:
        builder = service.get('build')
        # services without a build section cannot override anything
        if builder is None:
            continue

        dpath = builder.get('context')
        dockerfile = os.path.join(os.curdir, CONTAINERS_YAML_DIRNAME, dpath)
        dfp = DockerfileParser(dockerfile)

        try:
            if dfp.content is None:
                log.warning("Dockerfile is empty?")
            else:
                log.verbose("Parsed dockerfile {}", dpath)
        except FileNotFoundError as e:
            log.exit(e)

        if dfp.baseimage is None:
            dfp.baseimage = 'unknown_build'
        # elif dfp.baseimage.endswith(':template'):
        elif dfp.baseimage.startswith('rapydo/'):
            if dfp.baseimage not in templates:
                log.exit("""Unable to find {} in this project \nPlease inspect the FROM image in {}/Dockerfile """.format(dfp.baseimage, dockerfile))
            else:
                vanilla_img = service.get('image')
                template_img = dfp.baseimage
                log.verbose("{} overrides {}", vanilla_img, template_img)
                tbuilds[template_img] = templates.get(template_img)
                vbuilds[vanilla_img] = template_img

    return tbuilds, vbuilds
def _short_commit(commit):
    """Return (short_sha, one-line message truncated to 60 characters)."""
    message = commit.message.strip().replace('\n', "")
    if len(message) > 60:
        message = message[0:57] + "..."
    return commit.hexsha[0:7], message


def check_updates(path, gitobj, fetch_remote='origin', remote_branch=None):
    """Fetch a repository and report commits to be pulled and to be pushed.

    Parameters:
        path: human-readable repository name used in log messages
        gitobj: GitPython Repo object
        fetch_remote: remote to fetch and compare against
        remote_branch: remote branch name (defaults to the active branch)
    Returns:
        False when the repo is detached (no active branch), True otherwise
    """
    fetch(path, gitobj, fetch_remote)

    branch = get_active_branch(gitobj)
    if branch is None:
        log.warning("{} repo is detached? Unable to verify updates!", path)
        return False

    if remote_branch is None:
        remote_branch = branch

    # limit how many divergent commits we inspect
    max_remote = 20
    log.verbose("Inspecting {}/{}", path, branch)

    # CHECKING COMMITS BEHIND (TO BE PULLED) #
    behind_check = "{}..{}/{}".format(branch, fetch_remote, remote_branch)
    commits_behind = gitobj.iter_commits(behind_check, max_count=max_remote)
    try:
        # iter_commits is lazy: errors surface while listing
        commits_behind_list = list(commits_behind)
    except GitCommandError:
        log.info(
            "Remote branch {} not found for {} repo. Is it a local branch?".
            format(branch, path))
    else:
        if len(commits_behind_list) > 0:
            log.warning("{} repo should be updated!", path)
        else:
            log.debug("{} repo is updated", path)
        for c in commits_behind_list:
            # duplicated truncation logic extracted into _short_commit
            sha, message = _short_commit(c)
            log.warning("Missing commit from {}: {} ({})", path, sha, message)

    # CHECKING COMMITS AHEAD (TO BE PUSHED) #
    # only meaningful when comparing a branch against its own remote
    if remote_branch == branch:
        ahead_check = "{}/{}..{}".format(fetch_remote, remote_branch, branch)
        commits_ahead = gitobj.iter_commits(ahead_check, max_count=max_remote)
        try:
            commits_ahead_list = list(commits_ahead)
        except GitCommandError:
            log.info(
                "Remote branch {} not found for {}. Is it a local branch?".
                format(branch, path))
        else:
            if len(commits_ahead_list) > 0:
                log.warning("You have commits not pushed on {} repo", path)
            else:
                log.debug("You pushed all commits on {} repo", path)
            for c in commits_ahead_list:
                sha, message = _short_commit(c)
                log.warning("Unpushed commit in {}: {} ({})", path, sha, message)

    return True