class Plugin: """ Handle Plugins """ def __init__(self, **kargs): self.path_dirs = PathDirs(**kargs) self.manifest = join(self.path_dirs.meta_dir, "plugin_manifest.cfg") self.p_helper = PluginHelper(**kargs) self.d_client = docker.from_env() self.logger = Logger(__name__) def add(self, repo, tools=None, overrides=None, version="HEAD", branch="master", build=True, user=None, pw=None, groups=None, version_alias=None, wild=None, remove_old=True, disable_old=True, limit_groups=None, core=False): """ Adds a plugin of tool(s) tools is a list of tuples, where the pair is a tool name (path to Dockerfile) and version tools are for explicitly limiting which tools and versions (if version in tuple is '', then defaults to version) overrides is a list of tuples, where the pair is a tool name (path to Dockerfile) and a version overrides are for explicitly removing tools and overriding versions of tools (if version in tuple is '', then tool is removed, otherwise that tool is checked out at the specific version in the tuple) if tools and overrides are left as empty lists, then all tools in the repo are pulled down at the version and branch specified or defaulted to version is globally set for all tools, unless overridden in tools or overrides branch is globally set for all tools build is a boolean of whether or not to build the tools now user is the username for a private repo if needed pw is the password to go along with the username for a private repo groups is globally set for all tools version_alias is globally set for all tools and is a mapping from a friendly version tag to the real version commit ID wild lets you specify individual overrides for additional values in the tuple of tools or overrides. wild is a list containing one or more of the following: branch, build, groups, version_alias the order of the items in the wild list will expect values to be tacked on in the same order to the tuple for tools and overrides in addition to the tool name and version remove_old lets you specify whether or not to remove previously found tools that match ones being added currently (note: does not stop currently running instances of the older version) disable_old lets you specify whether or not to disable previously found tools that match ones being added currently (note: does not stop currently running instances of the older version) limit_groups is a list of groups to build tools for that match group names in vent.template of each tool, if it exists Examples: - repo=fe: (get all tools from repo 'fe' at version 'HEAD' on branch 'master') - repo=foo, version="3d1f", branch="foo": (get all tools from repo 'foo' at version '3d1f' on branch 'foo') - repo=foo, tools=[('bar', ''), ('baz', '1d32')]: (get only 'bar' from repo 'foo' at version 'HEAD' on branch 'master' and 'baz' from repo 'foo' at version '1d32' on branch 'master', ignore all other tools in repo 'foo') - repo=foo overrides=[('baz/bar', ''), ('.', '1c4e')], version='4fad': (get all tools from repo 'foo' at version '4fad' on branch 'master' except 'baz/bar' and for tool '.'
get version '1c4e') - repo=foo tools=[('bar', '1a2d')], overrides=[('baz', 'f2a1')]: (not a particularly useful example, but get 'bar' from 'foo' at version '1a2d' and get 'baz' from 'foo' at version 'f2a1' on branch 'master', ignore all other tools) """ # initialize and store class objects self.repo = repo.lower() self.tools = tools if (isinstance(self.tools, list) and len(self.tools) == 0): self.tools = None self.overrides = overrides self.version = version self.branch = branch self.build = build self.groups = groups self.core = core self.path, self.org, self.name = self.p_helper.get_path(repo, core=core) # TODO these need to be implemented self.version_alias = version_alias self.wild = wild self.remove_old = remove_old self.disable_old = disable_old self.limit_groups = limit_groups status = (True, None) status_code, _ = self.p_helper.clone(self.repo, user=user, pw=pw) self.p_helper.apply_path(self.repo) status = self._build_tools(status_code) return status @ErrorHandler def add_image(self, image, link_name, tag=None, registry=None, groups=None): """ Add an image with a tag from a Docker registry. Defaults to the Docker Hub if not specified. Use a Template object to write an image's information to `plugin_manifest.cfg' Args: image(type): docker image link_name(type): fill me Kwargs: tag(type): registry(type): groups(type): Group that the docker image belongs to. Returns: tuple(bool,str): if the function completed successfully, (True, name of image). If the function failed, (False, message about failure) """ status = (True, None) try: pull_name = image org = '' name = image if '/' in image: org, name = image.split('/') else: org = "official" if not tag: tag = "latest" if not registry: registry = "docker.io" full_image = registry + "/" + image + ":" + tag image = self.d_client.images.pull(full_image) section = ':'.join([registry, org, name, '', tag]) namespace = org + '/' + name # set template section and options for tool at version and branch template = Template(template=self.manifest) template.add_section(section) template.set_option(section, "name", name) template.set_option(section, "pull_name", pull_name) template.set_option(section, "namespace", namespace) template.set_option(section, "path", "") template.set_option(section, "repo", registry + '/' + org) template.set_option(section, "enabled", "yes") template.set_option(section, "branch", "") template.set_option(section, "version", tag) template.set_option(section, "last_updated", str(datetime.utcnow()) + " UTC") template.set_option(section, "image_name", image.attrs['RepoTags'][0]) template.set_option(section, "type", "registry") template.set_option(section, "link_name", link_name) template.set_option(section, "commit_id", "") template.set_option(section, "built", "yes") template.set_option(section, "image_id", image.attrs['Id'].split(':')[1][:12]) template.set_option(section, "groups", groups) # write out configuration to the manifest file template.write_config() status = (True, "Successfully added " + full_image) except Exception as e: # pragma: no cover self.logger.error("Couldn't add image because " + str(e)) status = (False, str(e)) return status @ErrorHandler def builder(self, template, match_path, image_name, section, build=None, branch=None, version=None): """ Build tools """ self.logger.info("Starting: builder") self.logger.info("install path: " + str(match_path)) self.logger.info("image name: " + str(image_name)) self.logger.info("build: " + str(build)) self.logger.info("branch: " + str(branch)) self.logger.info("version: 
" + str(version)) if build: self.build = build elif not hasattr(self, 'build'): self.build = True if branch: self.branch = branch elif not hasattr(self, 'branch'): self.branch = 'master' if version: self.version = version elif not hasattr(self, 'version'): self.version = 'HEAD' cwd = getcwd() self.logger.info("current working directory: " + str(cwd)) try: chdir(match_path) except Exception as e: # pragma: no cover self.logger.error("unable to change to directory: " + str(match_path) + " because: " + str(e)) return None template = self._build_image(template, match_path, image_name, section) chdir(cwd) # get untagged images untagged = None try: untagged = self.d_client.images.list(filters={ "label": "vent", "dangling": "true" }) except Exception as e: # pragma: no cover self.logger.error("unabled to get images to remove: " + str(e)) # remove untagged images if untagged: deleted_images = "" for image in untagged: deleted_images = '\n'.join([deleted_images, image.id]) try: self.d_client.images.remove(image.id, force=True) except Exception as e: # pragma: no cover self.logger.warning("unable to remove image: " + image.id + " because: " + str(e)) self.logger.info("removed dangling images:" + deleted_images) self.logger.info("template of builder: " + str(template)) self.logger.info("Finished: builder") return template def _build_tools(self, status): """ Create list of tools, paths, and versions to be built and sends them to build_manifest Args: status (tuple(bool, str)): Returns: response (tuple(bool, str)): If True, then the function performed as expected and the str is a string """ response = (True, None) # TODO implement features: wild, remove_old, disable_old, limit_groups # check result of clone, ensure successful or that it already exists if status: response = self.p_helper.checkout(branch=self.branch, version=self.version) if response[0]: search_groups = None if self.core: search_groups = 'core' matches = [] if self.tools is None and self.overrides is None: # get all tools matches = self.p_helper.available_tools( self.path, version=self.version, groups=search_groups) elif self.tools is None: # there's only something in overrides # grab all the tools then apply overrides matches = self.p_helper.available_tools( self.path, version=self.version, groups=search_groups) # !! TODO apply overrides to matches elif self.overrides is None: # there's only something in tools # only grab the tools specified matches = PluginHelper.tool_matches(tools=self.tools, version=self.version) else: # both tools and overrides were specified # grab only the tools specified, with the overrides applied o_matches = PluginHelper.tool_matches(tools=self.tools, version=self.version) matches = o_matches for override in self.overrides: override_t = None if override[0] == '.': override_t = ('', override[1]) else: override_t = override for match in o_matches: if override_t[0] == match[0]: matches.remove(match) matches.append(override_t) if len(matches) > 0: self._build_manifest(matches) else: response = (False, status) return response def _build_manifest(self, matches): """ Builds and writes the manifest for the tools being added """ # !! 
TODO check for pre-existing that conflict with request and # disable and/or remove image for match in matches: # keep track of whether or not to write an additional manifest # entry for multiple instances, and how many additional entries # to write addtl_entries = 0 # remove the .git for adding repo info to manifest if self.repo.endswith('.git'): self.repo = self.repo[:-4] # remove @ in match for template setting purposes if match[0].find('@') >= 0: true_name = match[0].split('@')[1] else: true_name = match[0] template = Template(template=self.manifest) # TODO check for special settings here first for the specific match self.version = match[1] response = self.p_helper.checkout(branch=self.branch, version=self.version) if response[0]: section = self.org + ":" + self.name + ":" + true_name + ":" section += self.branch + ":" + self.version # need to get rid of temp identifiers for tools in same repo match_path = self.path + match[0].split('@')[0] if not self.core: image_name = self.org + "-" + self.name + "-" if match[0] != '': # if tool is in a subdir, add that to the name of the # image image_name += '-'.join(match[0].split('/')[1:]) + "-" image_name += self.branch + ":" + self.version else: image_name = ('cyberreboot/vent-' + match[0].split('/')[-1] + ':' + self.branch) image_name = image_name.replace('_', '-') # check if the section already exists exists, options = template.section(section) previous_commit = None previous_commits = None head = False if exists: for option in options: # TODO check if tool name but a different version # exists - then disable/remove if set if option[0] == 'version' and option[1] == 'HEAD': head = True if option[0] == 'built' and option[1] == 'yes': # !! TODO remove pre-existing image pass if option[0] == 'commit_id': previous_commit = option[1] if option[0] == 'previous_versions': previous_commits = option[1] # check if tool comes from multi directory multi_tool = "no" if match[0].find('@') >= 0: multi_tool = "yes" # !! TODO # check if section should be removed from config i.e. 
all tools # but new commit removed one that was in a previous commit image_name = image_name.lower() if image_name.endswith(":head"): image_name = image_name.split(":head")[0] + ":HEAD" # set template section & options for tool at version and branch template.add_section(section) template.set_option(section, "name", true_name.split('/')[-1]) template.set_option(section, "namespace", self.org + '/' + self.name) template.set_option(section, "path", match_path) template.set_option(section, "repo", self.repo) template.set_option(section, "enabled", "yes") template.set_option(section, "multi_tool", multi_tool) template.set_option(section, "branch", self.branch) template.set_option(section, "version", self.version) template.set_option(section, "last_updated", str(datetime.utcnow()) + " UTC") template.set_option(section, "image_name", image_name.replace('@', '-')) template.set_option(section, "type", "repository") # save settings in vent.template to plugin_manifest # watch for multiple tools in same directory # just wanted to store match path with @ for path for use in # other actions tool_template = 'vent.template' if match[0].find('@') >= 0: tool_template = match[0].split('@')[1] + '.template' vent_template_path = join(match_path, tool_template) if os.path.exists(vent_template_path): with open(vent_template_path) as f: vent_template_val = f.read() else: vent_template_val = '' settings_dict = ParsedSections(vent_template_val) for setting in settings_dict: template.set_option(section, setting, json.dumps(settings_dict[setting])) # TODO do we need this if we save as a dictionary? vent_template = Template(vent_template_path) vent_status, response = vent_template.option("info", "name") if vent_status: template.set_option(section, "link_name", response) else: template.set_option(section, "link_name", true_name.split('/')[-1]) commit_id = None if self.version == 'HEAD': # remove @ in multi-tools chdir(match_path) cmd = "git rev-parse --short HEAD" commit_id = check_output(shlex.split(cmd), stderr=STDOUT, close_fds=True).strip() template.set_option(section, "commit_id", commit_id) if head: # no need to store previous commits if not HEAD, since # the version will always be the same commit ID if previous_commit and previous_commit != commit_id: if (previous_commits and previous_commit not in previous_commits): previous_commits = (previous_commit + ',' + previous_commits) elif not previous_commits: previous_commits = previous_commit if previous_commits and previous_commits != commit_id: template.set_option(section, "previous_versions", previous_commits) if self.version_alias: template.set_option(section, "version_alias", self.version_alias) if self.groups: template.set_option(section, "groups", self.groups) else: groups = vent_template.option("info", "groups") if groups[0]: template.set_option(section, "groups", groups[1]) # set groups to empty string if no groups defined for tool else: template.set_option(section, "groups", '') template = self._build_image(template, match_path, image_name, section) # write additional entries for multiple instances if addtl_entries > 0: # add 2 for naming conventions for i in range(2, addtl_entries + 2): addtl_section = section.rsplit(':', 2) addtl_section[0] += str(i) addtl_section = ':'.join(addtl_section) template.add_section(addtl_section) orig_vals = template.section(section)[1] for val in orig_vals: template.set_option(addtl_section, val[0], val[1]) template.set_option(addtl_section, "name", true_name.split('/')[-1] + str(i)) # write out configuration to the manifest 
file template.write_config() # reset to repo directory chdir(self.path) return def _build_image(self, template, match_path, image_name, section, build_local=False): """ Build docker images and store results in template """ def set_instances(template, section, built, image_id=None): """ Set build information for multiple instances """ self.logger.info("entering set_instances") i = 2 while True: addtl_section = section.rsplit(':', 2) addtl_section[0] += str(i) addtl_section = ':'.join(addtl_section) self.logger.info(addtl_section) if template.section(addtl_section)[0]: template.set_option(addtl_section, "built", built) if image_id: template.set_option(addtl_section, "image_id", image_id) template.set_option(addtl_section, "last_updated", Timestamp()) else: break i += 1 # determine whether a tool should be considered a multi instance try: settings_dict = json.loads(template.option(section, 'settings')[1]) if int(settings_dict['instances']) > 1: multi_instance = True else: multi_instance = False except Exception: multi_instance = False # !! TODO return status of whether it built successfully or not if self.build: cwd = getcwd() chdir(match_path) try: # currently can't use docker-py because it doesn't support # labels on images yet name = template.option(section, "name") groups = template.option(section, "groups") repo = template.option(section, "repo") t_type = template.option(section, "type") if groups[1] == "" or not groups[0]: groups = (True, "none") if not name[0]: name = (True, image_name) # pull if '/' in image_name, fallback to build pull = False if '/' in image_name and not build_local: try: self.logger.info("Trying to pull " + image_name) output = check_output(shlex.split("docker pull " + image_name), stderr=STDOUT, close_fds=True) self.logger.info("Pulling " + name[1] + "\n" + str(output)) i_attrs = self.d_client.images.get(image_name).attrs image_id = i_attrs['Id'].split(':')[1][:12] if image_id: template.set_option(section, "built", "yes") template.set_option(section, "image_id", image_id) template.set_option( section, "last_updated", str(datetime.utcnow()) + " UTC") # set other instances too if multi_instance: set_instances(template, section, 'yes', image_id) status = (True, "Pulled " + image_name) self.logger.info(str(status)) else: template.set_option(section, "built", "failed") template.set_option( section, "last_updated", str(datetime.utcnow()) + " UTC") # set other instances too if multi_instance: set_instances(template, section, 'failed') status = (False, "Failed to pull image " + str(output.split('\n')[-1])) self.logger.warning(str(status)) pull = True except Exception as e: # pragma: no cover self.logger.warning("Failed to pull image, going to" " build instead: " + str(e)) if not pull: # see if additional tags needed for images tagged at HEAD commit_tag = "" image_name = image_name.replace('@', '-') if image_name.endswith('HEAD'): commit_id = template.option(section, "commit_id") if commit_id[0]: commit_tag = (" -t " + image_name[:-4] + str(commit_id[1])) # see if additional file arg needed for building multiple # images from same directory file_tag = " ." multi_tool = template.option(section, 'multi_tool') if multi_tool[0] and multi_tool[1] == 'yes': specific_file = template.option(section, 'name')[1] if specific_file == 'unspecified': file_tag = " -f Dockerfile ." else: file_tag = " -f Dockerfile." + specific_file + " ."
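# NOTE (added for clarity; not in the original source): the build step below shells out to the
# docker CLI rather than using docker-py so that labels can be attached to the image. The
# vent.section / vent.repo / vent.type / vent.name / vent.groups labels are what auto_install()
# later matches, via images.list(filters={'label': 'vent'}), to rebuild manifest entries for
# images it finds already built on the host.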
# update image name with new version for update image_name = image_name.rsplit(':', 1)[0] + ':' + self.version output = check_output( shlex.split("docker build --label" " vent --label" " vent.section=" + section + " --label" " vent.repo=" + repo[1] + " --label" " vent.type=" + t_type[1] + " --label" " vent.name=" + name[1] + " --label" " vent.groups=" + groups[1] + " -t " + image_name + commit_tag + file_tag), stderr=STDOUT, close_fds=True) self.logger.info("Building " + name[1] + "\n" + str(output)) image_id = "" for line in output.split("\n"): suc_str = "Successfully built " if line.startswith(suc_str): image_id = line.split(suc_str)[1].strip() template.set_option(section, "built", "yes") template.set_option(section, "image_id", image_id) template.set_option(section, "last_updated", str(datetime.utcnow()) + " UTC") # set other instances too if multi_instance: set_instances(template, section, 'yes', image_id) except Exception as e: # pragma: no cover self.logger.error("unable to build image: " + str(image_name) + " because: " + str(e)) template.set_option(section, "built", "failed") template.set_option(section, "last_updated", str(datetime.utcnow()) + " UTC") if multi_instance: set_instances(template, section, 'failed') chdir(cwd) else: template.set_option(section, "built", "no") template.set_option(section, "last_updated", str(datetime.utcnow()) + " UTC") if multi_instance: set_instances(template, section, 'no') template.set_option(section, 'running', 'no') return template def list_tools(self): """ Return list of tuples of all tools """ tools = [] template = Template(template=self.manifest) exists, sections = template.sections() if exists: for section in sections: options = { 'section': section, 'enabled': None, 'built': None, 'version': None, 'repo': None, 'branch': None, 'name': None, 'groups': None, 'image_name': None } for option in options.keys(): exists, value = template.option(section, option) if exists: options[option] = value tools.append(options) return tools def remove(self, name=None, repo=None, namespace=None, branch="master", groups=None, enabled="yes", version="HEAD", built="yes"): """ Remove tool (name) or repository, repository is the url. If no arguments are specified, all tools will be removed for the defaults. 
""" # initialize args = locals() # want to remove things from manifest regardless of if built del args['built'] status = (True, None) # get resulting dict of sections with options that match constraints results, template = self.p_helper.constraint_options(args, []) for result in results: response, image_name = template.option(result, 'image_name') name = template.option(result, 'name')[1] try: settings_dict = json.loads( template.option(result, 'settings')[1]) instances = int(settings_dict['instances']) except Exception: instances = 1 try: # check for container and remove c_name = image_name.replace(':', '-').replace('/', '-') for i in range(1, instances + 1): container_name = c_name + str(i) if i != 1 else c_name container = self.d_client.containers.get(container_name) response = container.remove(v=True, force=True) self.logger.info(response) self.logger.info("Removing plugin container: " + container_name) except Exception as e: # pragma: no cover self.logger.warn("Unable to remove the plugin container: " + container_name + " because: " + str(e)) # check for image and remove try: response = None image_id = template.option(result, 'image_id')[1] response = self.d_client.images.remove(image_id, force=True) self.logger.info(response) self.logger.info("Removing plugin image: " + image_name) except Exception as e: # pragma: no cover self.logger.warn("Unable to remove the plugin image: " + image_name + " because: " + str(e)) # remove tool from the manifest for i in range(1, instances + 1): res = result.rsplit(':', 2) res[0] += str(i) if i != 1 else '' res = ':'.join(res) if template.section(res)[0]: status = template.del_section(res) self.logger.info("Removing plugin tool: " + res) # TODO if all tools from a repo have been removed, remove the repo template.write_config() return status def update(self, name=None, repo=None, namespace=None, branch=None, groups=None): """ Update tool (name) or repository, repository is the url. If no arguments are specified, all tools will be updated """ # initialize args = locals() status = (False, None) options = ['branch', 'groups', 'image_name'] # get resulting dict of sections with options that match constraints results, template = self.p_helper.constraint_options(args, options) for result in results: # check for container and remove try: container_name = results['image_name'].replace(':', '-') \ .replace('/', '-') container = self.d_client.containers.get(container_name) container.remove(v=True, force=True) except Exception as e: # pragma: no cover self.logger.info("Error updating: " + str(result) + " because: " + str(e)) # TODO git pull # TODO build # TODO docker pull # TODO update tool in the manifest self.logger.info("Updating plugin tool: " + result) template.write_config() return status # !! TODO name or group ? def versions(self, name, namespace=None, branch="master"): """ Return available versions of a tool """ # initialize args = locals() versions = [] options = ['version', 'previous_versions'] # get resulting dict of sections with options that match constraints results, _ = self.p_helper.constraint_options(args, options) for result in results: version_list = [results[result]['version']] if 'previous_versions' in results[result]: version_list += (results[result]['previous_versions']) \ .split(',') versions.append((result, version_list)) return versions # !! TODO name or group ? 
def current_version(self, name, namespace=None, branch="master"): """ Return current version for a given tool """ # initialize args = locals() versions = [] options = ['version'] # get resulting dict of sections with options that match constraints results, _ = self.p_helper.constraint_options(args, options) for result in results: versions.append((result, results[result]['version'])) return versions # !! TODO name or group ? def state(self, name, namespace=None, branch="master"): """ Return state of a tool, disabled/enabled for each version """ # initialize args = locals() states = [] options = ['enabled'] # get resulting dict of sections with options that match constraints results, _ = self.p_helper.constraint_options(args, options) for result in results: if results[result]['enabled'] == 'yes': states.append((result, 'enabled')) else: states.append((result, 'disabled')) return states # !! TODO name or group ? def enable(self, name, namespace=None, branch="master", version="HEAD"): """ Enable tool at a specific version, default to head """ # initialize args = locals() status = (False, None) # get resulting dict of sections with options that match constraints results, template = self.p_helper.constraint_options(args, []) for result in results: status = template.set_option(result, 'enabled', 'yes') template.write_config() return status # !! TODO name or group ? def disable(self, name, namespace=None, branch="master", version="HEAD"): """ Disable tool at a specific version, default to head """ # initialize args = locals() status = (False, None) # get resulting dict of sections with options that match constraints results, template = self.p_helper.constraint_options(args, []) for result in results: status = template.set_option(result, 'enabled', 'no') template.write_config() return status def auto_install(self): """ Automatically detects images and installs them in the manifest if they are not there already """ template = Template(template=self.manifest) sections = template.sections() images = self.d_client.images.list(filters={'label': 'vent'}) add_sections = [] status = (True, None) for image in images: if ('Labels' in image.attrs and 'vent.section' in image.attrs['Config']['Labels'] and not image.attrs['Config']['Labels']['vent.section'] in sections[1]): section = image.attrs['Config']['Labels']['vent.section'] section_str = image.attrs['Config']['Labels'][ 'vent.section'].split(":") template.add_section(section) if 'vent.name' in image.attrs['Config']['Labels']: template.set_option( section, 'name', image.attrs['Config']['Labels']['vent.name']) if 'vent.repo' in image.attrs['Config']['Labels']: template.set_option( section, 'repo', image.attrs['Config']['Labels']['vent.repo']) git_path = join(self.path_dirs.plugins_dir, "/".join(section_str[:2])) if not isdir(git_path): # clone it down status = self.p_helper.clone( image.attrs['Config']['Labels']['vent.repo']) template.set_option(section, 'path', join(git_path, section_str[-3][1:])) # get template settings # TODO account for template files not named vent.template v_template = Template(template=join( git_path, section_str[-3][1:], 'vent.template')) tool_sections = v_template.sections() if tool_sections[0]: for s in tool_sections[1]: section_dict = {} options = v_template.options(s) if options[0]: for option in options[1]: option_name = option if option == 'name': # get link name template.set_option( section, "link_name", v_template.option(s, option)[1]) option_name = 'link_name' opt_val = v_template.option(s, option)[1] 
section_dict[option_name] = opt_val if section_dict: template.set_option(section, s, json.dumps(section_dict)) if ('vent.type' in image.attrs['Config']['Labels'] and image.attrs['Config']['Labels']['vent.type'] == 'repository'): template.set_option(section, 'namespace', "/".join(section_str[:2])) template.set_option(section, 'enabled', 'yes') template.set_option(section, 'branch', section_str[-2]) template.set_option(section, 'version', section_str[-1]) template.set_option(section, 'last_updated', str(datetime.utcnow()) + " UTC") template.set_option(section, 'image_name', image.attrs['RepoTags'][0]) template.set_option(section, 'type', 'repository') if 'vent.groups' in image.attrs['Config']['Labels']: template.set_option( section, 'groups', image.attrs['Config']['Labels']['vent.groups']) template.set_option(section, 'built', 'yes') template.set_option(section, 'image_id', image.attrs['Id'].split(":")[1][:12]) template.set_option(section, 'running', 'no') # check if image is running as a container containers = self.d_client.containers.list( filters={'label': 'vent'}) for container in containers: if container.attrs['Image'] == image.attrs['Id']: template.set_option(section, 'running', 'yes') add_sections.append(section) template.write_config() if status[0]: status = (True, add_sections) return status
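# ---------------------------------------------------------------------------
# Usage sketch (added for illustration; not part of the original module). It
# assumes the Plugin API defined above, a running Docker daemon, and default
# PathDirs/PluginHelper settings; the repository URL and tool names are
# hypothetical.
if __name__ == '__main__':  # pragma: no cover
    plugin = Plugin()
    # clone the repo, then check out and build only 'bar' (at the default
    # version) and 'baz' (pinned to commit 1d32); other tools are ignored
    status = plugin.add('https://github.com/example/foo',
                        tools=[('bar', ''), ('baz', '1d32')],
                        branch='master', version='HEAD', build=True)
    if status[0]:
        # each manifest section comes back as a dict of its options
        for tool in plugin.list_tools():
            print(tool['section'], tool['built'], tool['image_name'])
    # registry images can be added without cloning a repo
    plugin.add_image('alpine', 'alpine-link', tag='latest', groups='core')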
class Plugin: """ Handle Plugins """ def __init__(self, **kargs): self.path_dirs = PathDirs(**kargs) self.manifest = os.path.join(self.path_dirs.meta_dir, "plugin_manifest.cfg") self.d_client = docker.from_env() self.logger = Logger(__name__) def apply_path(self, repo): """ Set path to where the repo is and return original path """ self.logger.info("Starting: apply_path") self.logger.info("repo given: " + str(repo)) status = (True, None) try: # rewrite repo for consistency if repo.endswith(".git"): repo = repo.split(".git")[0] # get org and repo name and path repo will be cloned to org, name = repo.split("/")[-2:] self.path = os.path.join(self.path_dirs.plugins_dir, org, name) self.logger.info("cloning to path: " + str(self.path)) # save current path cwd = os.getcwd() # set to new repo path os.chdir(self.path) status = (True, cwd) except Exception as e: self.logger.error("apply_path failed with error: " + str(e)) status = (False, e) self.logger.info("Status of apply_path: " + str(status)) self.logger.info("Finished: apply_path") return status def repo_branches(self, repo): """ Get the branches of a repository """ self.logger.info("Starting: repo_branches") self.logger.info("repo given: " + str(repo)) status = (True, None) branches = [] try: # switch to directory where repo will be cloned to status = self.apply_path(repo) if status[0]: cwd = status[1] else: self.logger.info( "apply_path failed. Exiting repo_branches with status " + str(status)) return status junk = subprocess.check_output(shlex.split("git pull --all"), stderr=subprocess.STDOUT, close_fds=True) branch_output = subprocess.check_output( shlex.split("git branch -a"), stderr=subprocess.STDOUT, close_fds=True) branch_output = branch_output.split("\n") for branch in branch_output: b = branch.strip() if b.startswith('*'): b = b[2:] if "/" in b: branches.append(b.rsplit('/', 1)[1]) elif b: branches.append(b) branches = list(set(branches)) self.logger.info("branches found: " + str(branches)) for branch in branches: try: junk = subprocess.check_output( shlex.split("git checkout " + branch), stderr=subprocess.STDOUT, close_fds=True) except Exception as e: # pragma: no cover self.logger.error("repo_branches failed with error: " + str(e) + " on branch: " + str(branch)) status = (False, e) self.logger.info("Exiting repo_branches with status: " + str(status)) return status try: os.chdir(cwd) except Exception as e: # pragma: no cover self.logger.error("unable to change directory to: " + str(cwd) + "because: " + str(e)) status = (True, branches) except Exception as e: self.logger.error("repo_branches failed with error: " + str(e)) status = (False, e) self.logger.info("Status of repo_branches: " + str(status)) self.logger.info("Finished: repo_branches") return status def repo_commits(self, repo): """ Get the commit IDs for all of the branches of a repository """ self.logger.info("Starting: repo_commits") self.logger.info("repo given: " + str(repo)) status = (True, None) commits = [] try: status = self.apply_path(repo) # switch to directory where repo will be cloned to if status[0]: cwd = status[1] else: self.logger.info( "apply_path failed. 
Exiting repo_commits with status: " + str(status)) return status status = self.repo_branches(repo) if status[0]: branches = status[1] for branch in branches: try: branch_output = subprocess.check_output( shlex.split("git rev-list " + branch), stderr=subprocess.STDOUT, close_fds=True) branch_output = ['HEAD' ] + branch_output.split("\n")[:-1] commits.append((branch, branch_output)) except Exception as e: # pragma: no cover self.logger.error("repo_commits failed with error: " + str(e) + " on branch: " + str(branch)) status = (False, e) self.logger.info("Exiting repo_commits with status: " + str(status)) return status else: self.logger.info( "repo_branches failed. Exiting repo_commits with status: " + str(status)) return status try: os.chdir(cwd) except Exception as e: # pragma: no cover self.logger.error("unable to change directory to: " + str(cwd) + " because: " + str(e)) status = (True, commits) except Exception as e: self.logger.error("repo_commits failed with error: " + str(e)) status = (False, e) self.logger.info("Status of repo_commits: " + str(status)) self.logger.info("Finished: repo_commits") return status def repo_tools(self, repo, branch, version): """ Get available tools for a repository branch at a version """ self.logger.info("Starting: repo_tools") self.logger.info("repo given: " + str(repo)) self.logger.info("branch given: " + str(branch)) self.logger.info("version given: " + str(version)) status = (True, None) try: tools = [] status = self.apply_path(repo) # switch to directory where repo will be cloned to if status[0]: cwd = status[1] else: self.logger.info( "apply_path failed. Exiting repo_tools with status: " + str(status)) return status self.branch = branch self.version = version status = self.checkout() if status[0]: tools = self._available_tools() else: self.logger.info( "checkout failed. Exiting repo_tools with status: " + str(status)) return status try: os.chdir(cwd) except Exception as e: # pragma: no cover self.logger.error("unable to change directory to: " + str(cwd) + " because: " + str(e)) status = (True, tools) except Exception as e: self.logger.error("repo_tools failed with error: " + str(e)) status = (False, e) self.logger.info("Status of repo_tools: " + str(status)) self.logger.info("Finished: repo_tools") return status def clone(self, repo, user=None, pw=None): """ Clone the repository """ self.logger.info("Starting: clone") self.logger.info("repo given: " + str(repo)) self.logger.info("user given: " + str(user)) status = (True, None) try: self.org = None self.name = None self.repo = repo # save current path cwd = os.getcwd() self.logger.info("current working directory: " + str(cwd)) # rewrite repo for consistency if self.repo.endswith(".git"): self.repo = self.repo.split(".git")[0] # get org and repo name and path repo will be cloned to self.org, self.name = self.repo.split("/")[-2:] self.logger.info("org name found: " + str(self.org)) self.logger.info("repo name found: " + str(self.name)) self.path = os.path.join(self.path_dirs.plugins_dir, self.org, self.name) self.logger.info("path to clone to: " + str(self.path)) # check if the directory exists, if so return now status = self.path_dirs.ensure_dir(self.path) if not status[0]: self.logger.info( "ensure_dir failed. 
Exiting clone with status: " + str(status)) return status # set to new repo path os.chdir(self.path) # if path already exists, try git checkout to update if status[0] and status[1] == 'exists': try: response = subprocess.check_output( shlex.split("git -C " + self.path + " rev-parse"), stderr=subprocess.STDOUT, close_fds=True) self.logger.info("path already exists: " + str(self.path)) status = (True, cwd) self.logger.info("Status of clone: " + str(status)) self.logger.info("Finished: clone") return status except Exception as e: # pragma: no cover self.logger.error("unable to checkout: " + str(path) + " because: " + str(e)) status = (False, e) self.logger.info("Exiting clone with status: " + str(status)) return status # ensure cloning still works even if ssl is broken...probably should be improved response = subprocess.check_output( shlex.split("git config --global http.sslVerify false"), stderr=subprocess.STDOUT, close_fds=True) # check if user and pw were supplied, typically for private repos if user and pw: # only https is supported when using user/pw repo = 'https://' + user + ':' + pw + '@' + self.repo.split( "https://")[-1] # clone repo and build tools response = subprocess.check_output( shlex.split("git clone --recursive " + repo + " ."), stderr=subprocess.STDOUT, close_fds=True) status = (True, cwd) except Exception as e: self.logger.error("clone failed with error: " + str(e)) status = (False, e) self.logger.info("Status of clone: " + str(status)) self.logger.info("Finished: clone") return status def add(self, repo, tools=None, overrides=None, version="HEAD", branch="master", build=True, user=None, pw=None, groups=None, version_alias=None, wild=None, remove_old=True, disable_old=True, limit_groups=None): """ Adds a plugin of tool(s) tools is a list of tuples, where the pair is a tool name (path to Dockerfile) and version tools are for explicitly limiting which tools and versions (if version in tuple is '', then defaults to version) overrides is a list of tuples, where the pair is a tool name (path to Dockerfile) and a version overrides are for explicitly removing tools and overriding versions of tools (if version in tuple is '', then tool is removed, otherwise that tool is checked out at the specific version in the tuple) if tools and overrides are left as empty lists, then all tools in the repo are pulled down at the version and branch specified or defaulted to version is globally set for all tools, unless overridden in tools or overrides branch is globally set for all tools build is a boolean of whether or not to build the tools now user is the username for a private repo if needed pw is the password to go along with the username for a private repo groups is globally set for all tools version_alias is globally set for all tools and is a mapping from a friendly version tag to the real version commit ID wild lets you specify individual overrides for additional values in the tuple of tools or overrides. 
wild is a list containing one or more of the following: branch, build, groups, version_alias the order of the items in the wild list will expect values to be tacked on in the same order to the tuple for tools and overrides in additional to the tool name and version remove_old lets you specify whether or not to remove previously found tools that match to ones being added currently (note does not stop currently running instances of the older version) disable_old lets you specify whether or not to disable previously found tools that match to ones being added currently (note does not stop currently running instances of the older version) limit_groups is a list of groups to build tools for that match group names in vent.template of each tool if exists Examples: repo=fe (get all tools from repo 'fe' at version 'HEAD' on branch 'master') repo=foo, version="3d1f", branch="foo" (get all tools from repo 'foo' at verion '3d1f' on branch 'foo') repo=foo, tools=[('bar', ''), ('baz', '1d32')] (get only 'bar' from repo 'foo' at version 'HEAD' on branch 'master' and 'baz' from repo 'foo' at version '1d32' on branch 'master', ignore all other tools in repo 'foo') repo=foo overrides=[('baz/bar', ''), ('.', '1c4e')], version='4fad' (get all tools from repo 'foo' at verion '4fad' on branch 'master' except 'baz/bar' and for tool '.' get version '1c4e') repo=foo tools=[('bar', '1a2d')], overrides=[('baz', 'f2a1')] (not a particularly useful example, but get 'bar' from 'foo' at version '1a2d' and get 'baz' from 'foo' at version 'f2a1' on branch 'master', ignore all other tools) """ # initialize and store class objects self.tools = tools self.overrides = overrides self.version = version self.branch = branch self.build = build self.groups = groups # TODO these need to be implemented self.version_alias = version_alias self.wild = wild self.remove_old = remove_old self.disable_old = disable_old self.limit_groups = limit_groups status = (True, None) status_code, cwd = self.clone(repo, user=user, pw=pw) status = self._build_tools(status_code) # set back to original path try: os.chdir(cwd) except Exception as e: # pragma: no cover pass return status @ErrorHandler def builder(self, template, match_path, image_name, section, build=None, branch=None, version=None): """ Build tools """ self.logger.info("Starting: builder") self.logger.info("install path: " + str(match_path)) self.logger.info("image name: " + str(image_name)) self.logger.info("build: " + str(build)) self.logger.info("branch: " + str(branch)) self.logger.info("version: " + str(version)) if build: self.build = build elif not hasattr(self, 'build'): self.build = True if branch: self.branch = branch elif not hasattr(self, 'branch'): self.branch = 'master' if version: self.version = version elif not hasattr(self, 'version'): self.version = 'HEAD' cwd = os.getcwd() self.logger.info("current working directory: " + str(cwd)) try: os.chdir(match_path) except Exception as e: self.logger.error("unable to change to directory: " + str(match_path) + " because: " + str(e)) return None template = self._build_image(template, match_path, image_name, section) try: os.chdir(cwd) except Exception as e: # pragma: no cover self.logger.error("unable to change to directory: " + str(cwd) + " because: " + str(e)) self.logger.info("template of builder: " + str(template)) self.logger.info("Finished: builder") return template def _build_tools(self, status): """ Create list of tools, paths, and versions to be built and sends them to build_manifest """ response = (True, None) # !! 
TODO implement features: wild, remove_old, disable_old, limit_groups # check result of clone, ensure successful or that it already exists if status: response = self.checkout() if response[0]: matches = [] if self.tools is None and self.overrides is None: # get all tools matches = self._available_tools() elif self.tools is None: # there's only something in overrides # grab all the tools then apply overrides matches = self._available_tools() # !! TODO apply overrides to matches elif self.overrides is None: # there's only something in tools # only grab the tools specified matches = self.get_tool_matches() else: # both tools and overrides were specified # grab only the tools specified, with the overrides applied orig_matches = self.get_tool_matches() matches = orig_matches for override in self.overrides: override_t = None if override[0] == '.': override_t = ('', override[1]) else: override_t = override for match in orig_matches: if override_t[0] == match[0]: matches.remove(match) matches.append(override_t) if len(matches) > 0: self._build_manifest(matches) else: response = (False, status) return response def get_tool_matches(self): """ Get the tools paths and versions that were specified by self.tools and self.version """ matches = [] if not hasattr(self, 'tools'): self.tools = [] if not hasattr(self, 'version'): self.version = 'HEAD' for tool in self.tools: match_version = self.version if tool[1] != '': match_version = tool[1] match = '' if tool[0].endswith('/'): match = tool[0][:-1] elif tool[0] != '.': match = tool[0] if not match.startswith('/') and match != '': match = '/' + match matches.append((match, match_version)) return matches def _build_manifest(self, matches): """ Builds and writes the manifest for the tools being added """ # !! TODO check for pre-existing that conflict with request and disable and/or remove image for match in matches: template = Template(template=self.manifest) # !! TODO check for special settings here first for the specific match self.version = match[1] response = self.checkout() if response[0]: section = self.org + ":" + self.name + ":" + match[ 0] + ":" + self.branch + ":" + self.version match_path = self.path + match[0] image_name = self.org + "-" + self.name + "-" if match[0] != '': # if tool is in a subdir, add that to the name of the image image_name += '-'.join(match[0].split('/')[1:]) + "-" image_name += self.branch + ":" + self.version # check if the section already exists exists, options = template.section(section) previous_commit = None previous_commits = None head = False if exists: for option in options: # TODO check if tool name but a different version exists - then disable/remove if set if option[0] == 'version' and option[1] == 'HEAD': head = True if option[0] == 'built' and option[1] == 'yes': # !! TODO remove pre-existing image pass if option[0] == 'commit_id': previous_commit = option[1] if option[0] == 'previous_versions': previous_commits = option[1] # !! TODO # check if section should be removed from config - i.e. 
all tools, # but new commit removed one that was in a previous commit # set template section and options for tool at version and branch template.add_section(section) template.set_option(section, "name", match[0].split('/')[-1]) template.set_option(section, "namespace", self.org + '/' + self.name) template.set_option(section, "path", match_path) template.set_option(section, "repo", self.repo) template.set_option(section, "enabled", "yes") template.set_option(section, "branch", self.branch) template.set_option(section, "version", self.version) template.set_option(section, "last_updated", str(datetime.datetime.utcnow()) + " UTC") template.set_option(section, "image_name", image_name) vent_template = Template( template=os.path.join(match_path, 'vent.template')) vent_status, response = vent_template.option("info", "name") if vent_status: template.set_option(section, "link_name", response) else: template.set_option(section, "link_name", match[0].split('/')[-1]) commit_id = None if self.version == 'HEAD': os.chdir(match_path) commit_id = subprocess.check_output( shlex.split("git rev-parse --short HEAD"), stderr=subprocess.STDOUT, close_fds=True).strip() template.set_option(section, "commit_id", commit_id) if head: # no need to store previous commits if not HEAD, since # the version will always be the same commit ID if previous_commit and previous_commit != commit_id: if previous_commits and previous_commit not in previous_commits: previous_commits = previous_commit + ',' + previous_commits elif not previous_commits: previous_commits = previous_commit if previous_commits and previous_commits != commit_id: template.set_option(section, "previous_versions", previous_commits) if self.version_alias: template.set_option(section, "version_alias", self.version_alias) if self.groups: template.set_option(section, "groups", self.groups) else: vent_template = os.path.join(match_path, 'vent.template') if os.path.exists(vent_template): v_template = Template(template=vent_template) groups = v_template.option("info", "groups") if groups[0]: template.set_option(section, "groups", groups[1]) template = self._build_image(template, match_path, image_name, section) # write out configuration to the manifest file template.write_config() # reset to repo directory os.chdir(self.path) return def _build_image(self, template, match_path, image_name, section): """ Build docker images and store results in template """ # !! 
TODO return status of whether it built successfully or not if self.build: try: os.chdir(match_path) # currently can't use docker-py because it doesn't support labels on images yet name = template.option(section, "name") groups = template.option(section, "groups") if groups[1] == "" or not groups[0]: groups = (True, "none") if not name[0]: name = (True, image_name) # pull if '/' in image_name, fallback to build pull = False if '/' in image_name: try: self.logger.info("Trying to pull " + image_name) output = subprocess.check_output( shlex.split("docker pull " + image_name), stderr=subprocess.STDOUT, close_fds=True) self.logger.info("Pulling " + name[1] + "\n" + str(output)) for line in output.split('\n'): if line.startswith("Digest: sha256:"): image_id = line.split( "Digest: sha256:")[1][:12] if image_id: template.set_option(section, "built", "yes") template.set_option(section, "image_id", image_id) template.set_option( section, "last_updated", str(datetime.datetime.utcnow()) + " UTC") status = (True, "Pulled " + image_name) self.logger.info(str(status)) else: template.set_option(section, "built", "failed") template.set_option( section, "last_updated", str(datetime.datetime.utcnow()) + " UTC") status = (False, "Failed to pull image " + str(output.split('\n')[-1])) self.logger.warning(str(status)) pull = True except Exception as e: # pragma: no cover self.logger.warning( "Failed to pull image, going to build instead: " + str(e)) if not pull: output = subprocess.check_output(shlex.split( "docker build --label vent --label vent.name=" + name[1] + " --label vent.groups=" + groups[1] + " -t " + image_name + " ."), stderr=subprocess.STDOUT, close_fds=True) self.logger.info("Building " + name[1] + "\n" + str(output)) image_id = "" for line in output.split("\n"): if line.startswith("Successfully built "): image_id = line.split( "Successfully built ")[1].strip() template.set_option(section, "built", "yes") template.set_option(section, "image_id", image_id) template.set_option( section, "last_updated", str(datetime.datetime.utcnow()) + " UTC") except Exception as e: # pragma: no cover self.logger.error("unable to build image: " + str(image_name) + " because: " + str(e)) template.set_option(section, "built", "failed") template.set_option(section, "last_updated", str(datetime.datetime.utcnow()) + " UTC") else: template.set_option(section, "built", "no") template.set_option(section, "last_updated", str(datetime.datetime.utcnow()) + " UTC") return template def _available_tools(self, groups=None): """ Return list of possible tools in repo for the given version and branch """ matches = [] if not hasattr(self, 'path'): return matches if groups: groups = groups.split(",") for root, dirnames, filenames in os.walk(self.path): for filename in fnmatch.filter(filenames, 'Dockerfile'): # !! TODO deal with wild/etc.? 
if groups: try: template = Template( template=os.path.join(root, 'vent.template')) for group in groups: template_groups = template.option("info", "groups") if template_groups[0] and group in template_groups[ 1]: matches.append( (root.split(self.path)[1], self.version)) except Exception as e: # pragma: no cover pass else: matches.append((root.split(self.path)[1], self.version)) return matches def checkout(self): """ Checkout a specific version and branch of a repo """ if not hasattr(self, 'branch'): self.branch = 'master' if not hasattr(self, 'version'): self.version = 'HEAD' response = (True, None) try: status = subprocess.check_output(shlex.split("git checkout " + self.branch), stderr=subprocess.STDOUT, close_fds=True) status = subprocess.check_output(shlex.split("git pull"), stderr=subprocess.STDOUT, close_fds=True) status = subprocess.check_output(shlex.split("git reset --hard " + self.version), stderr=subprocess.STDOUT, close_fds=True) response = (True, status) except Exception as e: # pragma: no cover response = (False, os.getcwd() + str(e)) return response @staticmethod def add_image(image, tag="latest"): """ Add an image from a registry/hub rather than building from a repository """ # !! TODO return def constraint_options(self, constraint_dict, options): """ Return result of constraints and options against a template """ constraints = {} template = Template(template=self.manifest) for constraint in constraint_dict: if constraint != 'self': if constraint_dict[constraint] or constraint_dict[ constraint] == '': constraints[constraint] = constraint_dict[constraint] results = template.constrained_sections(constraints=constraints, options=options) return results, template def tools(self): """ Return list of tuples of all tools """ tools = [] template = Template(template=self.manifest) exists, sections = template.sections() if exists: for section in sections: options = { 'section': section, 'enabled': None, 'built': None, 'version': None, 'repo': None, 'branch': None, 'name': None, 'groups': None, 'image_name': None } for option in options.keys(): exists, value = template.option(section, option) if exists: options[option] = value tools.append(options) return tools def remove(self, name=None, repo=None, namespace=None, branch="master", groups=None, enabled="yes", version="HEAD", built="yes"): """ Remove tool (name) or repository, repository is the url. If no arguments are specified, all tools will be removed for the defaults. 
""" # initialize args = locals() status = (True, None) # get resulting dictionary of sections with options that match constraints results, template = self.constraint_options(args, []) for result in results: response, image_name = template.option(result, 'image_name') # check for container and remove container_name = image_name.replace(':', '-').replace('/', '-') try: container = self.d_client.containers.get(container_name) response = container.remove(v=True, force=True) self.logger.info(response) self.logger.info("Removing plugin container: " + container_name) except Exception as e: # pragma: no cover self.logger.warn("Unable to remove the plugin container: " + container_name + " because: " + str(e)) # check for image and remove try: response = self.d_client.images.remove(image_name) self.logger.info(response) self.logger.info("Removing plugin image: " + image_name) except Exception as e: # pragma: no cover self.logger.warn("Unable to remove the plugin image: " + image_name + " because: " + str(e)) # remove tool from the manifest status = template.del_section(result) self.logger.info("Removing plugin tool: " + result) # TODO if all tools from a repo have been removed, remove the repo template.write_config() return status def update(self, name=None, repo=None, namespace=None, branch=None, groups=None): """ Update tool (name) or repository, repository is the url. If no arguments are specified, all tools will be updated """ # initialize args = locals() status = (False, None) options = ['branch', 'groups', 'image_name'] # get resulting dictionary of sections with options that match constraints results, template = self.constraint_options(args, options) for result in results: # check for container and remove try: container_name = results['image_name'].replace(':', '-') \ .replace('/', '-') container = self.d_client.containers.get(container_name) response = container.remove(v=True, force=True) except Exception as e: # pragma: no cover pass # TODO git pull # TODO build # TODO docker pull # TODO update tool in the manifest self.logger.info("Updating plugin tool: " + result) template.write_config() return status # !! TODO name or group ? def versions(self, name, namespace=None, branch="master"): """ Return available versions of a tool """ # initialize args = locals() versions = [] options = ['version', 'previous_versions'] # get resulting dictionary of sections with options that match constraints results, _ = self.constraint_options(args, options) for result in results: version_list = [results[result]['version']] if 'previous_versions' in results[result]: version_list = version_list + ( results[result]['previous_versions']).split(',') versions.append((result, version_list)) return versions # !! TODO name or group ? def current_version(self, name, namespace=None, branch="master"): """ Return current version for a given tool """ # initialize args = locals() versions = [] options = ['version'] # get resulting dictionary of sections with options that match constraints results, _ = self.constraint_options(args, options) for result in results: versions.append((result, results[result]['version'])) return versions # !! TODO name or group ? 
def state(self, name, namespace=None, branch="master"): """ Return state of a tool, disabled/enabled for each version """ # initialize args = locals() states = [] options = ['enabled'] # get resulting dictionary of sections with options that match constraints results, _ = self.constraint_options(args, options) for result in results: if results[result]['enabled'] == 'yes': states.append((result, 'enabled')) else: states.append((result, 'disabled')) return states # !! TODO name or group ? def enable(self, name, namespace=None, branch="master", version="HEAD"): """ Enable tool at a specific version, default to head """ # initialize args = locals() status = (False, None) # get resulting dictionary of sections with options that match constraints results, template = self.constraint_options(args, []) for result in results: status = template.set_option(result, 'enabled', 'yes') template.write_config() return status # !! TODO name or group ? def disable(self, name, namespace=None, branch="master", version="HEAD"): """ Disable tool at a specific version, default to head """ # initialize args = locals() status = (False, None) # get resulting dictionary of sections with options that match constraints results, template = self.constraint_options(args, []) for result in results: status = template.set_option(result, 'enabled', 'no') template.write_config() return status
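# ---------------------------------------------------------------------------
# Illustrative manifest entry (added for clarity; not from the original file).
# _build_manifest() writes one INI-style section per tool to
# plugin_manifest.cfg, keyed as org:name:tool:branch:version. The option names
# below are the ones set via template.set_option() above; the concrete values
# are hypothetical.
#
#   [example:foo:/bar:master:HEAD]
#   name = bar
#   namespace = example/foo
#   path = /path/to/plugins/example/foo/bar
#   repo = https://github.com/example/foo
#   enabled = yes
#   branch = master
#   version = HEAD
#   commit_id = 1a2b3c4
#   image_name = example-foo-bar-master:HEAD
#   type = repository
#   built = yes
#   last_updated = 2018-01-01 00:00:00.000000 UTC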
class Plugin: """ Handle Plugins """ def __init__(self, **kargs): self.path_dirs = PathDirs(**kargs) self.manifest = os.path.join(self.path_dirs.meta_dir, "plugin_manifest.cfg") self.d_client = docker.from_env() self.logger = Logger(__name__) def apply_path(self, repo): """ Set path to where the repo is and return original path """ self.logger.info("Starting: apply_path") self.logger.info("repo given: "+str(repo)) status = (True, None) try: # rewrite repo for consistency if repo.endswith(".git"): repo = repo.split(".git")[0] # get org and repo name and path repo will be cloned to org, name = repo.split("/")[-2:] self.path = os.path.join(self.path_dirs.plugins_dir, org, name) self.logger.info("cloning to path: "+str(self.path)) # save current path cwd = os.getcwd() # set to new repo path os.chdir(self.path) status = (True, cwd) except Exception as e: self.logger.error("apply_path failed with error: "+str(e)) status = (False, e) self.logger.info("Status of apply_path: "+str(status)) self.logger.info("Finished: apply_path") return status def repo_branches(self, repo): """ Get the branches of a repository """ self.logger.info("Starting: repo_branches") self.logger.info("repo given: "+str(repo)) status = (True, None) branches = [] try: # switch to directory where repo will be cloned to status = self.apply_path(repo) if status[0]: cwd = status[1] else: self.logger.info("apply_path failed. Exiting repo_branches with status "+str(status)) return status junk = subprocess.check_output(shlex.split("git pull --all"), stderr=subprocess.STDOUT, close_fds=True) branch_output = subprocess.check_output(shlex.split("git branch -a"), stderr=subprocess.STDOUT, close_fds=True) branch_output = branch_output.split("\n") for branch in branch_output: b = branch.strip() if b.startswith('*'): b = b[2:] if "/" in b: branches.append(b.rsplit('/', 1)[1]) elif b: branches.append(b) branches = list(set(branches)) self.logger.info("branches found: "+str(branches)) for branch in branches: try: junk = subprocess.check_output(shlex.split("git checkout " + branch), stderr=subprocess.STDOUT, close_fds=True) except Exception as e: # pragma: no cover self.logger.error("repo_branches failed with error: "+str(e)+" on branch: "+str(branch)) status = (False, e) self.logger.info("Exiting repo_branches with status: "+str(status)) return status try: os.chdir(cwd) except Exception as e: # pragma: no cover self.logger.error("unable to change directory to: "+str(cwd)+"because: "+str(e)) status = (True, branches) except Exception as e: self.logger.error("repo_branches failed with error: "+str(e)) status = (False, e) self.logger.info("Status of repo_branches: "+str(status)) self.logger.info("Finished: repo_branches") return status def repo_commits(self, repo): """ Get the commit IDs for all of the branches of a repository """ self.logger.info("Starting: repo_commits") self.logger.info("repo given: "+str(repo)) status = (True, None) commits = [] try: status = self.apply_path(repo) # switch to directory where repo will be cloned to if status[0]: cwd = status[1] else: self.logger.info("apply_path failed. 
Exiting repo_commits with status: "+str(status)) return status status = self.repo_branches(repo) if status[0]: branches = status[1] for branch in branches: try: branch_output = subprocess.check_output(shlex.split("git rev-list " + branch), stderr=subprocess.STDOUT, close_fds=True) branch_output = ['HEAD'] + branch_output.split("\n")[:-1] commits.append((branch, branch_output)) except Exception as e: # pragma: no cover self.logger.error("repo_commits failed with error: "+str(e)+" on branch: "+str(branch)) status = (False, e) self.logger.info("Exiting repo_commits with status: "+str(status)) return status else: self.logger.info("repo_branches failed. Exiting repo_commits with status: "+str(status)) return status try: os.chdir(cwd) except Exception as e: # pragma: no cover self.logger.error("unable to change directory to: "+str(cwd)+" because: "+str(e)) status = (True, commits) except Exception as e: self.logger.error("repo_commits failed with error: "+str(e)) status = (False, e) self.logger.info("Status of repo_commits: "+str(status)) self.logger.info("Finished: repo_commits") return status def repo_tools(self, repo, branch, version): """ Get available tools for a repository branch at a version """ self.logger.info("Starting: repo_tools") self.logger.info("repo given: "+str(repo)) self.logger.info("branch given: "+str(branch)) self.logger.info("version given: "+str(version)) status = (True, None) try: tools = [] status = self.apply_path(repo) # switch to directory where repo will be cloned to if status[0]: cwd = status[1] else: self.logger.info("apply_path failed. Exiting repo_tools with status: "+str(status)) return status self.branch = branch self.version = version status = self.checkout() if status[0]: tools = self._available_tools() else: self.logger.info("checkout failed. Exiting repo_tools with status: "+str(status)) return status try: os.chdir(cwd) except Exception as e: # pragma: no cover self.logger.error("unable to change directory to: "+str(cwd)+" because: "+str(e)) status = (True, tools) except Exception as e: self.logger.error("repo_tools failed with error: "+str(e)) status = (False, e) self.logger.info("Status of repo_tools: "+str(status)) self.logger.info("Finished: repo_tools") return status def clone(self, repo, user=None, pw=None): """ Clone the repository """ self.logger.info("Starting: clone") self.logger.info("repo given: "+str(repo)) self.logger.info("user given: "+str(user)) status = (True, None) try: self.org = None self.name = None self.repo = repo # save current path cwd = os.getcwd() self.logger.info("current working directory: "+str(cwd)) # rewrite repo for consistency if self.repo.endswith(".git"): self.repo = self.repo.split(".git")[0] # get org and repo name and path repo will be cloned to self.org, self.name = self.repo.split("/")[-2:] self.logger.info("org name found: "+str(self.org)) self.logger.info("repo name found: "+str(self.name)) self.path = os.path.join(self.path_dirs.plugins_dir, self.org, self.name) self.logger.info("path to clone to: "+str(self.path)) # check if the directory exists, if so return now status = self.path_dirs.ensure_dir(self.path) if not status[0]: self.logger.info("ensure_dir failed. 
Exiting clone with status: "+str(status)) return status # set to new repo path os.chdir(self.path) # if path already exists, try git checkout to update if status[0] and status[1] == 'exists': try: response = subprocess.check_output(shlex.split("git -C "+self.path+" rev-parse"), stderr=subprocess.STDOUT, close_fds=True) self.logger.info("path already exists: "+str(self.path)) status = (True, cwd) self.logger.info("Status of clone: "+str(status)) self.logger.info("Finished: clone") return status except Exception as e: # pragma: no cover self.logger.error("unable to checkout: "+str(path)+" because: "+str(e)) status = (False, e) self.logger.info("Exiting clone with status: "+str(status)) return status # ensure cloning still works even if ssl is broken...probably should be improved response = subprocess.check_output(shlex.split("git config --global http.sslVerify false"), stderr=subprocess.STDOUT, close_fds=True) # check if user and pw were supplied, typically for private repos if user and pw: # only https is supported when using user/pw repo = 'https://'+user+':'+pw+'@'+self.repo.split("https://")[-1] # clone repo and build tools response = subprocess.check_output(shlex.split("git clone --recursive " + repo + " ."), stderr=subprocess.STDOUT, close_fds=True) status = (True, cwd) except Exception as e: self.logger.error("clone failed with error: "+str(e)) status = (False, e) self.logger.info("Status of clone: "+str(status)) self.logger.info("Finished: clone") return status def add(self, repo, tools=None, overrides=None, version="HEAD", branch="master", build=True, user=None, pw=None, groups=None, version_alias=None, wild=None, remove_old=True, disable_old=True, limit_groups=None): """ Adds a plugin of tool(s) tools is a list of tuples, where the pair is a tool name (path to Dockerfile) and version tools are for explicitly limiting which tools and versions (if version in tuple is '', then defaults to version) overrides is a list of tuples, where the pair is a tool name (path to Dockerfile) and a version overrides are for explicitly removing tools and overriding versions of tools (if version in tuple is '', then tool is removed, otherwise that tool is checked out at the specific version in the tuple) if tools and overrides are left as empty lists, then all tools in the repo are pulled down at the version and branch specified or defaulted to version is globally set for all tools, unless overridden in tools or overrides branch is globally set for all tools build is a boolean of whether or not to build the tools now user is the username for a private repo if needed pw is the password to go along with the username for a private repo groups is globally set for all tools version_alias is globally set for all tools and is a mapping from a friendly version tag to the real version commit ID wild lets you specify individual overrides for additional values in the tuple of tools or overrides. 
wild is a list containing one or more of the following: branch, build, groups, version_alias the order of the items in the wild list will expect values to be tacked on in the same order to the tuple for tools and overrides in additional to the tool name and version remove_old lets you specify whether or not to remove previously found tools that match to ones being added currently (note does not stop currently running instances of the older version) disable_old lets you specify whether or not to disable previously found tools that match to ones being added currently (note does not stop currently running instances of the older version) limit_groups is a list of groups to build tools for that match group names in vent.template of each tool if exists Examples: repo=fe (get all tools from repo 'fe' at version 'HEAD' on branch 'master') repo=foo, version="3d1f", branch="foo" (get all tools from repo 'foo' at verion '3d1f' on branch 'foo') repo=foo, tools=[('bar', ''), ('baz', '1d32')] (get only 'bar' from repo 'foo' at version 'HEAD' on branch 'master' and 'baz' from repo 'foo' at version '1d32' on branch 'master', ignore all other tools in repo 'foo') repo=foo overrides=[('baz/bar', ''), ('.', '1c4e')], version='4fad' (get all tools from repo 'foo' at verion '4fad' on branch 'master' except 'baz/bar' and for tool '.' get version '1c4e') repo=foo tools=[('bar', '1a2d')], overrides=[('baz', 'f2a1')] (not a particularly useful example, but get 'bar' from 'foo' at version '1a2d' and get 'baz' from 'foo' at version 'f2a1' on branch 'master', ignore all other tools) """ # initialize and store class objects self.tools = tools self.overrides = overrides self.version = version self.branch = branch self.build = build self.groups = groups # TODO these need to be implemented self.version_alias = version_alias self.wild = wild self.remove_old = remove_old self.disable_old = disable_old self.limit_groups = limit_groups status = (True, None) status_code, cwd = self.clone(repo, user=user, pw=pw) status = self._build_tools(status_code) # set back to original path try: os.chdir(cwd) except Exception as e: # pragma: no cover pass return status @ErrorHandler def builder(self, template, match_path, image_name, section, build=None, branch=None, version=None): """ Build tools """ self.logger.info("Starting: builder") self.logger.info("install path: "+str(match_path)) self.logger.info("image name: "+str(image_name)) self.logger.info("build: "+str(build)) self.logger.info("branch: "+str(branch)) self.logger.info("version: "+str(version)) if build: self.build = build elif not hasattr(self, 'build'): self.build = True if branch: self.branch = branch elif not hasattr(self, 'branch'): self.branch = 'master' if version: self.version = version elif not hasattr(self, 'version'): self.version = 'HEAD' cwd = os.getcwd() self.logger.info("current working directory: "+str(cwd)) try: os.chdir(match_path) except Exception as e: self.logger.error("unable to change to directory: "+str(match_path)+" because: "+str(e)) return None template = self._build_image(template, match_path, image_name, section) try: os.chdir(cwd) except Exception as e: # pragma: no cover self.logger.error("unable to change to directory: "+str(cwd)+" because: "+str(e)) self.logger.info("template of builder: "+str(template)) self.logger.info("Finished: builder") return template def _build_tools(self, status): """ Create list of tools, paths, and versions to be built and sends them to build_manifest """ response = (True, None) # !! 
TODO implement features: wild, remove_old, disable_old, limit_groups # check result of clone, ensure successful or that it already exists if status: response = self.checkout() if response[0]: matches = [] if self.tools is None and self.overrides is None: # get all tools matches = self._available_tools() elif self.tools is None: # there's only something in overrides # grab all the tools then apply overrides matches = self._available_tools() # !! TODO apply overrides to matches elif self.overrides is None: # there's only something in tools # only grab the tools specified matches = self.get_tool_matches() else: # both tools and overrides were specified # grab only the tools specified, with the overrides applied orig_matches = self.get_tool_matches() matches = orig_matches for override in self.overrides: override_t = None if override[0] == '.': override_t = ('', override[1]) else: override_t = override for match in orig_matches: if override_t[0] == match[0]: matches.remove(match) matches.append(override_t) if len(matches) > 0: self._build_manifest(matches) else: response = (False, status) return response def get_tool_matches(self): """ Get the tools paths and versions that were specified by self.tools and self.version """ matches = [] if not hasattr(self, 'tools'): self.tools = [] if not hasattr(self, 'version'): self.version = 'HEAD' for tool in self.tools: match_version = self.version if tool[1] != '': match_version = tool[1] match = '' if tool[0].endswith('/'): match = tool[0][:-1] elif tool[0] != '.': match = tool[0] if not match.startswith('/') and match != '': match = '/'+match matches.append((match, match_version)) return matches def _build_manifest(self, matches): """ Builds and writes the manifest for the tools being added """ # !! TODO check for pre-existing that conflict with request and disable and/or remove image for match in matches: template = Template(template=self.manifest) # !! TODO check for special settings here first for the specific match self.version = match[1] response = self.checkout() if response[0]: section = self.org + ":" + self.name + ":" + match[0] + ":" + self.branch + ":" + self.version match_path = self.path + match[0] image_name = self.org + "-" + self.name + "-" if match[0] != '': # if tool is in a subdir, add that to the name of the image image_name += '-'.join(match[0].split('/')[1:]) + "-" image_name += self.branch + ":" + self.version # check if the section already exists exists, options = template.section(section) previous_commit = None previous_commits = None head = False if exists: for option in options: # TODO check if tool name but a different version exists - then disable/remove if set if option[0] == 'version' and option[1] == 'HEAD': head = True if option[0] == 'built' and option[1] == 'yes': # !! TODO remove pre-existing image pass if option[0] == 'commit_id': previous_commit = option[1] if option[0] == 'previous_versions': previous_commits = option[1] # !! TODO # check if section should be removed from config - i.e. 
all tools, # but new commit removed one that was in a previous commit # set template section and options for tool at version and branch template.add_section(section) template.set_option(section, "name", match[0].split('/')[-1]) template.set_option(section, "namespace", self.org+'/'+self.name) template.set_option(section, "path", match_path) template.set_option(section, "repo", self.repo) template.set_option(section, "enabled", "yes") template.set_option(section, "branch", self.branch) template.set_option(section, "version", self.version) template.set_option(section, "last_updated", str(datetime.datetime.utcnow()) + " UTC") template.set_option(section, "image_name", image_name) vent_template = Template(template=os.path.join(match_path, 'vent.template')) vent_status, response = vent_template.option("info", "name") if vent_status: template.set_option(section, "link_name", response) else: template.set_option(section, "link_name", match[0].split('/')[-1]) commit_id = None if self.version == 'HEAD': os.chdir(match_path) commit_id = subprocess.check_output(shlex.split("git rev-parse --short HEAD"), stderr=subprocess.STDOUT, close_fds=True).strip() template.set_option(section, "commit_id", commit_id) if head: # no need to store previous commits if not HEAD, since # the version will always be the same commit ID if previous_commit and previous_commit != commit_id: if previous_commits and previous_commit not in previous_commits: previous_commits = previous_commit+','+previous_commits elif not previous_commits: previous_commits = previous_commit if previous_commits and previous_commits != commit_id: template.set_option(section, "previous_versions", previous_commits) if self.version_alias: template.set_option(section, "version_alias", self.version_alias) if self.groups: template.set_option(section, "groups", self.groups) else: vent_template = os.path.join(match_path, 'vent.template') if os.path.exists(vent_template): v_template = Template(template=vent_template) groups = v_template.option("info", "groups") if groups[0]: template.set_option(section, "groups", groups[1]) template = self._build_image(template, match_path, image_name, section) # write out configuration to the manifest file template.write_config() # reset to repo directory os.chdir(self.path) return def _build_image(self, template, match_path, image_name, section): """ Build docker images and store results in template """ # !! 
TODO return status of whether it built successfully or not if self.build: try: os.chdir(match_path) # currently can't use docker-py because it doesn't support labels on images yet name = template.option(section, "name") groups = template.option(section, "groups") if groups[1] == "" or not groups[0]: groups = (True, "none") if not name[0]: name = (True, image_name) # pull if '/' in image_name, fallback to build pull = False if '/' in image_name: try: self.logger.info("Trying to pull "+image_name) output = subprocess.check_output(shlex.split("docker pull "+image_name), stderr=subprocess.STDOUT, close_fds=True) self.logger.info("Pulling "+name[1]+"\n"+str(output)) for line in output.split('\n'): if line.startswith("Digest: sha256:"): image_id = line.split("Digest: sha256:")[1][:12] if image_id: template.set_option(section, "built", "yes") template.set_option(section, "image_id", image_id) template.set_option(section, "last_updated", str(datetime.datetime.utcnow()) + " UTC") status = (True, "Pulled "+image_name) self.logger.info(str(status)) else: template.set_option(section, "built", "failed") template.set_option(section, "last_updated", str(datetime.datetime.utcnow()) + " UTC") status = (False, "Failed to pull image "+str(output.split('\n')[-1])) self.logger.warning(str(status)) pull = True except Exception as e: # pragma: no cover self.logger.warning("Failed to pull image, going to build instead: "+str(e)) if not pull: output = subprocess.check_output(shlex.split("docker build --label vent --label vent.name="+name[1]+" --label vent.groups="+groups[1]+" -t " + image_name + " ."), stderr=subprocess.STDOUT, close_fds=True) self.logger.info("Building "+name[1]+"\n"+str(output)) image_id = "" for line in output.split("\n"): if line.startswith("Successfully built "): image_id = line.split("Successfully built ")[1].strip() template.set_option(section, "built", "yes") template.set_option(section, "image_id", image_id) template.set_option(section, "last_updated", str(datetime.datetime.utcnow()) + " UTC") except Exception as e: # pragma: no cover self.logger.error("unable to build image: "+str(image_name)+" because: "+str(e)) template.set_option(section, "built", "failed") template.set_option(section, "last_updated", str(datetime.datetime.utcnow()) + " UTC") else: template.set_option(section, "built", "no") template.set_option(section, "last_updated", str(datetime.datetime.utcnow()) + " UTC") return template def _available_tools(self, groups=None): """ Return list of possible tools in repo for the given version and branch """ matches = [] if not hasattr(self, 'path'): return matches if groups: groups = groups.split(",") for root, dirnames, filenames in os.walk(self.path): for filename in fnmatch.filter(filenames, 'Dockerfile'): # !! TODO deal with wild/etc.? 
if groups: try: template = Template(template=os.path.join(root, 'vent.template')) for group in groups: template_groups = template.option("info", "groups") if template_groups[0] and group in template_groups[1]: matches.append((root.split(self.path)[1], self.version)) except Exception as e: # pragma: no cover pass else: matches.append((root.split(self.path)[1], self.version)) return matches def checkout(self): """ Checkout a specific version and branch of a repo """ if not hasattr(self, 'branch'): self.branch = 'master' if not hasattr(self, 'version'): self.version = 'HEAD' response = (True, None) try: status = subprocess.check_output(shlex.split("git checkout " + self.branch), stderr=subprocess.STDOUT, close_fds=True) status = subprocess.check_output(shlex.split("git pull"), stderr=subprocess.STDOUT, close_fds=True) status = subprocess.check_output(shlex.split("git reset --hard " + self.version), stderr=subprocess.STDOUT, close_fds=True) response = (True, status) except Exception as e: # pragma: no cover response = (False, os.getcwd()+str(e)) return response @staticmethod def add_image(image, tag="latest"): """ Add an image from a registry/hub rather than building from a repository """ # !! TODO return def constraint_options(self, constraint_dict, options): """ Return result of constraints and options against a template """ constraints = {} template = Template(template=self.manifest) for constraint in constraint_dict: if constraint != 'self': if constraint_dict[constraint] or constraint_dict[constraint] == '': constraints[constraint] = constraint_dict[constraint] results = template.constrained_sections(constraints=constraints, options=options) return results, template def tools(self): """ Return list of tuples of all tools """ tools = [] template = Template(template=self.manifest) exists, sections = template.sections() if exists: for section in sections: options = {'section':section, 'enabled':None, 'built':None, 'version':None, 'repo':None, 'branch':None, 'name':None, 'groups':None, 'image_name':None} for option in options.keys(): exists, value = template.option(section, option) if exists: options[option] = value tools.append(options) return tools def remove(self, name=None, repo=None, namespace=None, branch="master", groups=None, enabled="yes", version="HEAD", built="yes"): """ Remove tool (name) or repository, repository is the url. If no arguments are specified, all tools will be removed for the defaults. 
""" # initialize args = locals() status = (True, None) # get resulting dictionary of sections with options that match constraints results, template = self.constraint_options(args, []) for result in results: response, image_name = template.option(result, 'image_name') # check for container and remove container_name = image_name.replace(':', '-').replace('/', '-') try: container = self.d_client.containers.get(container_name) response = container.remove(v=True, force=True) self.logger.info(response) self.logger.info("Removing plugin container: "+container_name) except Exception as e: # pragma: no cover self.logger.warn("Unable to remove the plugin container: " + container_name + " because: " + str(e)) # check for image and remove try: response = self.d_client.images.remove(image_name) self.logger.info(response) self.logger.info("Removing plugin image: "+image_name) except Exception as e: # pragma: no cover self.logger.warn("Unable to remove the plugin image: " + image_name + " because: " + str(e)) # remove tool from the manifest status = template.del_section(result) self.logger.info("Removing plugin tool: "+result) # TODO if all tools from a repo have been removed, remove the repo template.write_config() return status def update(self, name=None, repo=None, namespace=None, branch=None, groups=None): """ Update tool (name) or repository, repository is the url. If no arguments are specified, all tools will be updated """ # initialize args = locals() status = (False, None) options = ['branch', 'groups', 'image_name'] # get resulting dictionary of sections with options that match constraints results, template = self.constraint_options(args, options) for result in results: # check for container and remove try: container_name = results['image_name'].replace(':', '-') \ .replace('/', '-') container = self.d_client.containers.get(container_name) response = container.remove(v=True, force=True) except Exception as e: # pragma: no cover pass # TODO git pull # TODO build # TODO docker pull # TODO update tool in the manifest self.logger.info("Updating plugin tool: "+result) template.write_config() return status # !! TODO name or group ? def versions(self, name, namespace=None, branch="master"): """ Return available versions of a tool """ # initialize args = locals() versions = [] options = ['version', 'previous_versions'] # get resulting dictionary of sections with options that match constraints results, _ = self.constraint_options(args, options) for result in results: version_list = [results[result]['version']] if 'previous_versions' in results[result]: version_list = version_list+(results[result]['previous_versions']).split(',') versions.append((result, version_list)) return versions # !! TODO name or group ? def current_version(self, name, namespace=None, branch="master"): """ Return current version for a given tool """ # initialize args = locals() versions = [] options = ['version'] # get resulting dictionary of sections with options that match constraints results, _ = self.constraint_options(args, options) for result in results: versions.append((result, results[result]['version'])) return versions # !! TODO name or group ? 
    # !! TODO name or group ?
    def state(self, name, namespace=None, branch="master"):
        """ Return state of a tool, disabled/enabled for each version """
        # initialize
        args = locals()
        states = []
        options = ['enabled']
        # get resulting dictionary of sections with options that match
        # constraints
        results, _ = self.constraint_options(args, options)
        for result in results:
            if results[result]['enabled'] == 'yes':
                states.append((result, 'enabled'))
            else:
                states.append((result, 'disabled'))
        return states

    # !! TODO name or group ?
    def enable(self, name, namespace=None, branch="master", version="HEAD"):
        """ Enable tool at a specific version, default to head """
        # initialize
        args = locals()
        status = (False, None)
        # get resulting dictionary of sections with options that match
        # constraints
        results, template = self.constraint_options(args, [])
        for result in results:
            status = template.set_option(result, 'enabled', 'yes')
        template.write_config()
        return status

    # !! TODO name or group ?
    def disable(self, name, namespace=None, branch="master", version="HEAD"):
        """ Disable tool at a specific version, default to head """
        # initialize
        args = locals()
        status = (False, None)
        # get resulting dictionary of sections with options that match
        # constraints
        results, template = self.constraint_options(args, [])
        for result in results:
            status = template.set_option(result, 'enabled', 'no')
        template.write_config()
        return status
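# Naming convention used by _build_manifest above (the org, repo, and tool
# path below are hypothetical, shown only to illustrate how the manifest
# section key and image name are assembled; an empty tool path drops the
# middle image segment):
#
#   section    = org + ":" + name + ":" + tool_path + ":" + branch + ":" + version
#   # e.g. "myorg:myrepo:/sensors/syslog:master:HEAD"
#   image_name = org + "-" + name + "-" + "-".join(tool_path.split("/")[1:]) + "-" + branch + ":" + version
#   # e.g. "myorg-myrepo-sensors-syslog-master:HEAD"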
class PluginHelper: """ Handle helper functions for the Plugin class """ def __init__(self, **kargs): self.d_client = docker.from_env() self.path_dirs = PathDirs(**kargs) self.manifest = join(self.path_dirs.meta_dir, "plugin_manifest.cfg") self.logger = Logger(__name__) def constraint_options(self, constraint_dict, options): """ Return result of constraints and options against a template """ constraints = {} template = Template(template=self.manifest) for constraint in constraint_dict: if constraint != 'self': if (constraint_dict[constraint] or constraint_dict[constraint] == ''): constraints[constraint] = constraint_dict[constraint] results = template.constrained_sections(constraints=constraints, options=options) return results, template def get_path(self, repo, core=False): """ Return the path for the repo """ if repo.endswith(".git"): repo = repo.split(".git")[0] org, name = repo.split("/")[-2:] path = self.path_dirs.plugins_dir path = join(path, org, name) return path, org, name def apply_path(self, repo): """ Set path to where the repo is and return original path """ self.logger.info("Starting: apply_path") self.logger.info("repo given: " + str(repo)) try: # rewrite repo for consistency if repo.endswith(".git"): repo = repo.split(".git")[0] # get org and repo name and path repo will be cloned to org, name = repo.split("/")[-2:] path = join(self.path_dirs.plugins_dir, org, name) self.logger.info("cloning to path: " + str(path)) # save current path cwd = getcwd() # set to new repo path self.path_dirs.ensure_dir(path) chdir(path) status = (True, cwd, path) except Exception as e: # pragma: no cover self.logger.error("apply_path failed with error: " + str(e)) status = (False, str(e)) self.logger.info("Status of apply_path: " + str(status)) self.logger.info("Finished: apply_path") return status def checkout(self, branch="master", version="HEAD"): """ Checkout a specific version and branch of a repo """ self.logger.info("Starting: checkout") self.logger.info("branch given: " + str(branch)) self.logger.info("version given: " + str(version)) try: status = check_output(shlex.split("git checkout " + branch), stderr=STDOUT, close_fds=True) status = check_output(shlex.split("git pull"), stderr=STDOUT, close_fds=True) status = check_output(shlex.split("git reset --hard " + version), stderr=STDOUT, close_fds=True) response = (True, status) except Exception as e: # pragma: no cover self.logger.error("checkout failed with error: " + str(e)) response = (False, str(e)) self.logger.info("Status of checkout: " + str(response)) self.logger.info("Finished: checkout") return response def clone(self, repo, user=None, pw=None): """ Clone the repository """ self.logger.info("Starting: clone") self.logger.info("repo given: " + str(repo)) self.logger.info("user given: " + str(user)) status = (True, None) try: status = self.apply_path(repo) # if path already exists, try git checkout to update if status[0]: self.logger.info("path to clone to: " + str(status[2])) try: check_output(shlex.split("git -C " + status[2] + " rev-parse"), stderr=STDOUT, close_fds=True) self.logger.info("path already exists: " + str(status[2])) self.logger.info("Status of clone: " + str(status[0])) self.logger.info("Finished: clone") chdir(status[1]) return (True, status[1]) except Exception as e: # pragma: no cover self.logger.info("repo doesn't exist, attempting to " + "clone: " + str(e)) else: self.logger.error("unable to clone") return status # ensure cloning still works even if ssl is broken cmd = "git config --global http.sslVerify 
false" check_output(shlex.split(cmd), stderr=STDOUT, close_fds=True) # check if user and pw were supplied, typically for private repos if user and pw: # only https is supported when using user/pw auth_repo = 'https://' + user + ':' + pw + '@' repo = auth_repo + repo.split("https://")[-1] # clone repo and build tools check_output(shlex.split("git clone --recursive " + repo + " ."), stderr=STDOUT, close_fds=True) chdir(status[1]) status = (True, status[1]) except Exception as e: # pragma: no cover e_str = str(e) # scrub username and password from error message if e_str.find('@') >= 0: e_str = e_str[:e_str.find('//') + 2] + \ e_str[e_str.find('@') + 1:] self.logger.error("clone failed with error: " + e_str) status = (False, e_str) self.logger.info("Status of clone: " + str(status)) self.logger.info("Finished: clone") return status def available_tools(self, path, version="HEAD", groups=None): """ Return list of possible tools in repo for the given version and branch """ matches = [] if groups: groups = groups.split(",") for root, _, filenames in walk(path): files = fnmatch.filter(filenames, 'Dockerfile*') # append additional identifiers to tools if multiple in same # directory add_info = len(files) > 1 for f in files: # !! TODO deal with wild/etc.? addtl_info = '' if add_info: # @ will be delimiter symbol for multi-tools try: addtl_info = '@' + f.split('.')[1] except Exception as e: addtl_info = '@unspecified' if groups: if add_info and not addtl_info == '@unspecified': tool_template = addtl_info.split('@')[1] + '.template' else: tool_template = 'vent.template' try: template = Template(template=join(root, tool_template)) for group in groups: template_groups = template.option("info", "groups") if (template_groups[0] and group in template_groups[1]): matches.append((root.split(path)[1] + addtl_info, version)) except Exception as e: # pragma: no cover self.logger.info("error: " + str(e)) else: matches.append((root.split(path)[1] + addtl_info, version)) return matches @staticmethod def tool_matches(tools=None, version='HEAD'): """ Get the tools paths and versions that were specified """ matches = [] if tools: for tool in tools: match_version = version if tool[1] != '': match_version = tool[1] match = '' if tool[0].endswith('/'): match = tool[0][:-1] elif tool[0] != '.': match = tool[0] if not match.startswith('/') and match != '': match = '/'+match matches.append((match, match_version)) return matches def start_sections(self, s, files, groups, enabled, branch, version): """ Run through sections for prep_start """ tool_d = {} status = (True, None) for section in s: # initialize needed vars c_name = s[section]['image_name'].replace(':', '-') c_name = c_name.replace('/', '-') instance_num = re.search(r'\d+$', s[section]['name']) if instance_num: c_name += instance_num.group() image_name = s[section]['image_name'] # checkout the right version and branch of the repo cwd = getcwd() self.logger.info("current directory is: " + str(cwd)) # images built from registry won't have path if s[section]['path'] != '': chdir(join(s[section]['path'])) status = self.checkout(branch=branch, version=version) self.logger.info(status) chdir(cwd) tool_d[c_name] = {'image': image_name, 'name': c_name} # get rid of all commented sections in various runtime # configurations manifest = Template(self.manifest) overall_dict = {} for setting in ['info', 'docker', 'gpu', 'settings', 'service']: option = manifest.option(section, setting) if option[0]: overall_dict[setting] = {} settings_dict = json.loads(option[1]) for opt in 
settings_dict: if not opt.startswith('#'): overall_dict[setting][opt] = \ settings_dict[opt] if 'docker' in overall_dict: options_dict = overall_dict['docker'] for option in options_dict: options = options_dict[option] # check for commands to evaluate if '`' in options: cmds = options.split('`') if len(cmds) > 2: i = 1 while i < len(cmds): try: cmds[i] = check_output(shlex.split(cmds[i]), stderr=STDOUT, close_fds=True).strip() except Exception as e: # pragma: no cover self.logger.error("unable to evaluate command specified in vent.template: " + str(e)) i += 2 options = "".join(cmds) # store options set for docker try: tool_d[c_name][option] = literal_eval(options) except Exception as e: # pragma: no cover self.logger.info("unable to literal_eval: " + str(options)) tool_d[c_name][option] = options if 'labels' not in tool_d[c_name]: tool_d[c_name]['labels'] = {} # get the service uri info if 'service' in overall_dict: try: options_dict = overall_dict['service'] for option in options_dict: tool_d[c_name]['labels'][option] = options_dict[option] except Exception as e: # pragma: no cover self.logger.error("unable to store service options for " "docker: " + str(e)) # check for gpu settings if 'gpu' in overall_dict: try: options_dict = json.loads(status[1]) for option in options_dict: tool_d[c_name]['labels']['gpu.'+option] = options_dict[option] except Exception as e: # pragma: no cover self.logger.error("unable to store gpu options for " "docker: " + str(e)) # get temporary name for links, etc. plugin_c = Template(template=self.manifest) status, plugin_sections = plugin_c.sections() self.logger.info(status) for plugin_section in plugin_sections: status = plugin_c.option(plugin_section, "link_name") self.logger.info(status) image_status = plugin_c.option(plugin_section, "image_name") self.logger.info(image_status) if status[0] and image_status[0]: cont_name = image_status[1].replace(':', '-') cont_name = cont_name.replace('/', '-') if cont_name not in tool_d: tool_d[cont_name] = {'image': image_status[1], 'name': cont_name, 'start': False} tool_d[cont_name]['tmp_name'] = status[1] # add extra labels tool_d[c_name]['labels']['vent'] = Version() tool_d[c_name]['labels']['vent.namespace'] = s[section]['namespace'] tool_d[c_name]['labels']['vent.branch'] = branch tool_d[c_name]['labels']['vent.version'] = version tool_d[c_name]['labels']['vent.name'] = s[section]['name'] tool_d[c_name]['labels']['vent.section'] = section tool_d[c_name]['labels']['vent.repo'] = s[section]['repo'] tool_d[c_name]['labels']['vent.type'] = s[section]['type'] # check for log_config settings in external-services externally_configured = False vent_config = Template(self.path_dirs.cfg_file) for ext_tool in vent_config.section('external-services')[1]: if ext_tool[0].lower() == 'syslog': try: log_dict = json.loads(ext_tool[1]) # configure if not locally active if ('locally_active' not in log_dict or log_dict['locally_active'] == 'no'): del log_dict['locally_active'] log_config = {} log_config['type'] = 'syslog' log_config['config'] = {} ip_address = '' port = '' for option in log_dict: if option == 'ip_address': ip_address = log_dict[option] elif option == 'port': port = log_dict['port'] syslog_address = 'tcp://' + ip_address + ':' + port syslog_config = {'syslog-address': syslog_address, 'syslog-facility': 'deamon', 'tag': 'plugin'} log_config['config'].update(syslog_config) externally_configured = True except Exception as e: # pragma: no cover self.logger.error("external settings for log_config" " couldn't be stored because: 
" + str(e)) externally_configured = False if not externally_configured: log_config = {'type': 'syslog', 'config': {'syslog-address': 'tcp://0.0.0.0:514', 'syslog-facility': 'daemon', 'tag': 'plugin'}} if 'groups' in s[section]: # add labels for groups tool_d[c_name]['labels']['vent.groups'] = s[section]['groups'] # add restart=always to core containers if 'core' in s[section]['groups']: tool_d[c_name]['restart_policy'] = {"Name": "always"} # map network names to environment variables if 'network' in s[section]['groups']: vent_config = Template(template=self.path_dirs.cfg_file) nic_mappings = vent_config.section('network-mapping') nics = '' if nic_mappings[0]: for nic in nic_mappings[1]: nics += nic[0] + ":" + nic[1] + "," nics = nics[:-1] if nics: if 'environment' in tool_d[c_name]: tool_d[c_name]['environment'].append("VENT_NICS="+nics) else: tool_d[c_name]['environment'] = ["VENT_NICS="+nics] # send logs to syslog if ('syslog' not in s[section]['groups'] and 'core' in s[section]['groups']): log_config['config']['tag'] = 'core' tool_d[c_name]['log_config'] = log_config if 'syslog' not in s[section]['groups']: tool_d[c_name]['log_config'] = log_config # mount necessary directories if 'files' in s[section]['groups']: if 'volumes' in tool_d[c_name]: tool_d[c_name]['volumes'][self.path_dirs.base_dir[:-1]] = {'bind': '/vent', 'mode': 'ro'} else: tool_d[c_name]['volumes'] = {self.path_dirs.base_dir[:-1]: {'bind': '/vent', 'mode': 'ro'}} if files[0]: tool_d[c_name]['volumes'][files[1]] = {'bind': '/files', 'mode': 'rw'} else: tool_d[c_name]['log_config'] = log_config # add label for priority if 'settings' in overall_dict: try: options_dict = overall_dict['settings'] for option in options_dict: if option == 'priority': tool_d[c_name]['labels']['vent.priority'] = options_dict[option] except Exception as e: # pragma: no cover self.logger.error("unable to store settings options " "for docker " + str(e)) # only start tools that have been built if s[section]['built'] != 'yes': del tool_d[c_name] # store section information for adding info to manifest later else: tool_d[c_name]['section'] = section return status, tool_d def prep_start(self, repo=None, name=None, groups=None, enabled="yes", branch="master", version="HEAD"): """ Start a set of tools that match the parameters given, if no parameters are given, start all installed tools on the master branch at verison HEAD that are enabled """ args = locals() self.logger.info("Starting: prep_start") self.logger.info("Arguments: "+str(args)) status = (False, None) try: options = ['name', 'namespace', 'built', 'groups', 'path', 'image_name', 'branch', 'repo', 'type', 'version'] vent_config = Template(template=self.path_dirs.cfg_file) files = vent_config.option('main', 'files') files = (files[0], expanduser(files[1])) s, _ = self.constraint_options(args, options) status, tool_d = self.start_sections(s, files, groups, enabled, branch, version) # look out for links to delete because they're defined externally links_to_delete = set() # check and update links, volumes_from, network_mode for container in tool_d.keys(): if 'links' in tool_d[container]: for link in tool_d[container]['links']: # add links to external services already running if # necessary, by default configure local services too configure_local = True ext = 'external-services' if link in vent_config.options(ext)[1]: try: lconf = json.loads(vent_config.option(ext, link)[1]) if ('locally_active' not in lconf or lconf['locally_active'] == 'no'): ip_adr = lconf['ip_address'] port = lconf['port'] 
tool_d[container]['extra_hosts'] = {} # containers use lowercase names for # connections tool_d[container]['extra_hosts'][link.lower()] = ip_adr # create an environment variable for container # to access port later env_variable = link.upper() + \ "_CUSTOM_PORT=" + port if 'environment' not in tool_d[container]: tool_d[container]['environment'] = [] tool_d[container]['environment'].append(env_variable) # remove the entry from links because no # longer connecting to local container links_to_delete.add(link) configure_local = False except Exception as e: # pragma: no cover self.logger.error("couldn't load external" " settings because: " + str(e)) configure_local = True status = False if configure_local: for c in tool_d.keys(): if ('tmp_name' in tool_d[c] and tool_d[c]['tmp_name'] == link): tool_d[container]['links'][tool_d[c]['name']] = tool_d[container]['links'].pop(link) if 'volumes_from' in tool_d[container]: tmp_volumes_from = tool_d[container]['volumes_from'] tool_d[container]['volumes_from'] = [] for volumes_from in list(tmp_volumes_from): for c in tool_d.keys(): if ('tmp_name' in tool_d[c] and tool_d[c]['tmp_name'] == volumes_from): tool_d[container]['volumes_from'].append(tool_d[c]['name']) tmp_volumes_from.remove(volumes_from) tool_d[container]['volumes_from'] += tmp_volumes_from if 'network_mode' in tool_d[container]: if tool_d[container]['network_mode'].startswith('container:'): network_c_name = tool_d[container]['network_mode'].split('container:')[1] for c in tool_d.keys(): if ('tmp_name' in tool_d[c] and tool_d[c]['tmp_name'] == network_c_name): tool_d[container]['network_mode'] = 'container:' + tool_d[c]['name'] # remove tmp_names for c in tool_d.keys(): if 'tmp_name' in tool_d[c]: del tool_d[c]['tmp_name'] # remove links section if all were externally configured for c in tool_d.keys(): if 'links' in tool_d[c]: for link in links_to_delete: if link in tool_d[c]['links']: del tool_d[c]['links'][link] # delete links if no more defined if not tool_d[c]['links']: del tool_d[c]['links'] # remove containers that shouldn't be started for c in tool_d.keys(): deleted = False if 'start' in tool_d[c] and not tool_d[c]['start']: del tool_d[c] deleted = True if not deleted: # look for tools services that are being done externally # tools are capitalized in vent.cfg, so make them lowercase # for comparison ext = 'external-services' external_tools = vent_config.section(ext)[1] name = tool_d[c]['labels']['vent.name'] for tool in external_tools: if name == tool[0].lower(): try: tool_config = json.loads(tool[1]) if ('locally_active' in tool_config and tool_config['locally_active'] == 'no'): del tool_d[c] except Exception as e: # pragma: no cover self.logger.warn("Locally running container " + name + " may be redundant") if status: status = (True, tool_d) else: status = (False, tool_d) except Exception as e: # pragma: no cover self.logger.error("prep_start failed with error: "+str(e)) status = (False, e) self.logger.info("Status of prep_start: "+str(status[0])) self.logger.info("Finished: prep_start") return status def start_priority_containers(self, groups, group_orders, tool_d): """ Select containers based on priorities to start """ vent_cfg = Template(self.path_dirs.cfg_file) cfg_groups = vent_cfg.option('groups', 'start_order') if cfg_groups[0]: cfg_groups = cfg_groups[1].split(',') else: cfg_groups = [] all_groups = sorted(set(groups)) s_conts = [] f_conts = [] # start tools in order of group defined in vent.cfg for group in cfg_groups: # remove from all_groups because already checked out if 
group in all_groups: all_groups.remove(group) if group in group_orders: for cont_t in sorted(group_orders[group]): if cont_t[1] not in s_conts: s_conts, f_conts = self.start_containers(cont_t[1], tool_d, s_conts, f_conts) # start tools that haven't been specified in the vent.cfg, if any for group in all_groups: if group in group_orders: for cont_t in sorted(group_orders[group]): if cont_t[1] not in s_conts: s_conts, f_conts = self.start_containers(cont_t[1], tool_d, s_conts, f_conts) return (s_conts, f_conts) def start_remaining_containers(self, containers_remaining, tool_d): """ Select remaining containers that didn't have priorities to start """ s_containers = [] f_containers = [] for container in containers_remaining: s_containers, f_containers = self.start_containers(container, tool_d, s_containers, f_containers) return (s_containers, f_containers) def start_containers(self, container, tool_d, s_containers, f_containers): """ Start container that was passed in and return status """ # use section to add info to manifest section = tool_d[container]['section'] del tool_d[container]['section'] manifest = Template(self.manifest) try: c = self.d_client.containers.get(container) c.start() s_containers.append(container) manifest.set_option(section, 'running', 'yes') self.logger.info("started " + str(container) + " with ID: " + str(c.short_id)) except Exception as err: try: gpu = 'gpu.enabled' failed = False if (gpu in tool_d[container]['labels'] and tool_d[container]['labels'][gpu] == 'yes'): vent_config = Template(template=self.path_dirs.cfg_file) port = '' host = '' result = vent_config.option('nvidia-docker-plugin', 'port') if result[0]: port = result[1] else: port = '3476' result = vent_config.option('nvidia-docker-plugin', 'host') if result[0]: host = result[1] else: # now just requires ip, ifconfig try: route = check_output(('ip', 'route')).split('\n') default = '' # grab the default network device. for device in route: if 'default' in device: default = device.split()[4] break # grab the IP address for the default device ip_addr = check_output(('ifconfig', default)) ip_addr = ip_addr.split('\n')[1].split()[1] host = ip_addr except Exception as e: # pragma no cover self.logger.error('failed to grab ip. Ensure that \ ip and ifconfig are installed') nd_url = 'http://' + host + ':' + port + '/v1.0/docker/cli' params = {'vol': 'nvidia_driver'} r = requests.get(nd_url, params=params) if r.status_code == 200: options = r.text.split() for option in options: if option.startswith('--volume-driver='): tool_d[container]['volume_driver'] = option.split("=", 1)[1] elif option.startswith('--volume='): vol = option.split("=", 1)[1].split(":") if 'volumes' in tool_d[container]: # !! 
TODO handle if volumes is a list tool_d[container]['volumes'][vol[0]] = {'bind': vol[1], 'mode': vol[2]} else: tool_d[container]['volumes'] = {vol[0]: {'bind': vol[1], 'mode': vol[2]}} elif option.startswith('--device='): dev = option.split("=", 1)[1] if 'devices' in tool_d[container]: tool_d[container]['devices'].append(dev + ":" + dev + ":rwm") else: tool_d[container]['devices'] = [dev + ":" + dev + ":rwm"] else: self.logger.error("Unable to parse " + "nvidia-docker option: " + str(option)) else: failed = True f_containers.append(container) manifest.set_option(section, 'running', 'failed') self.logger.error("failed to start " + str(container) + " because nvidia-docker-plugin " + "failed with: " + str(r.status_code)) if not failed: cont_id = self.d_client.containers.run(detach=True, **tool_d[container]) s_containers.append(container) manifest.set_option(section, 'running', 'yes') self.logger.info("started " + str(container) + " with ID: " + str(cont_id)) except Exception as e: # pragma: no cover f_containers.append(container) manifest.set_option(section, 'running', 'failed') self.logger.error("failed to start " + str(container) + " because: " + str(e)) # save changes made to manifest manifest.write_config() return s_containers, f_containers
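# End-to-end start flow (sketch of how the PluginHelper methods above fit
# together; the grouping and priority variables are illustrative placeholders,
# not an exact excerpt of the real caller):
#
#   helper = PluginHelper()
#   status, tool_d = helper.prep_start(branch='master', version='HEAD')
#   if status:
#       # tools whose vent.template sets a priority start first, ordered by
#       # the 'start_order' groups in vent.cfg ...
#       s1, f1 = helper.start_priority_containers(groups, group_orders, tool_d)
#       # ... then everything left over, via start_containers(), which calls
#       # d_client.containers.run(detach=True, **tool_d[container])
#       s2, f2 = helper.start_remaining_containers(containers_remaining, tool_d)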