def create_payloads(self):
    '''
    Create all missing data payloads in current directory
    Doesn't compute md5 during creation because tarball can be created manually
    Also create symlink to versioned payload
    '''
    arrow("Creating payloads")
    for payload_name in self.select_payloads():
        paydesc = self.describe_payload(payload_name)
        if exists(paydesc["link_path"]):
            continue
        arrow(payload_name, 1)
        try:
            # create non versioned payload file
            if not exists(paydesc["dest_path"]):
                if paydesc["isdir"]:
                    self.create_payload_tarball(paydesc["dest_path"],
                                                paydesc["source_path"],
                                                paydesc["compressor"])
                else:
                    self.create_payload_file(paydesc["dest_path"],
                                             paydesc["source_path"],
                                             paydesc["compressor"])
            # create versioned payload file
            if lexists(paydesc["link_path"]):
                unlink(paydesc["link_path"])
            symlink(paydesc["dest_path"], paydesc["link_path"])
        except Exception as e:
            # cleaning file in case of error
            if exists(paydesc["dest_path"]):
                unlink(paydesc["dest_path"])
            if lexists(paydesc["link_path"]):
                unlink(paydesc["link_path"])
            raise ISError(u"Unable to create payload %s" % payload_name, e)
def create_image(self, jdescription):
    '''
    Create a script tarball in current directory
    '''
    # create tarball
    arrow("Creating image tarball")
    arrowlevel(1)
    arrow(u"Name %s" % self.image_name)
    try:
        try:
            tarball = Tarball.open(self.image_name, mode="w:gz", dereference=True)
        except Exception as e:
            raise ISError(u"Unable to create tarball %s" % self.image_name, e)
        # add description.json
        arrow("Add description.json")
        tarball.add_str("description.json", jdescription, REGTYPE, 0644)
        # add changelog
        if self.changelog is not None:
            arrow("Add changelog")
            tarball.add_str("changelog", self.changelog.verbatim, REGTYPE, 0644)
        # add format
        arrow("Add format")
        tarball.add_str("format", self.format, REGTYPE, 0644)
        # add setup scripts
        self.add_scripts(tarball, self.setup_path)
        # add optional scripts
        for d in (self.build_path, self.parser_path, self.lib_path):
            if exists(d):
                self.add_scripts(tarball, d)
        # closing tarball file
        tarball.close()
    except (SystemExit, KeyboardInterrupt):
        # remove the partial tarball, then re-raise so the interruption
        # still aborts the build
        if exists(self.image_name):
            unlink(self.image_name)
        raise
    arrowlevel(-1)
def download(self, directory, force=False, image=True, payload=False):
    '''
    Download image in directory
    Doesn't use in memory image because we cannot access it
    This is done to avoid parasitizing self._tarfile access to memfile
    '''
    # check if destination exists
    directory = abspath(directory)
    if image:
        dest = join(directory, self.filename)
        if not force and exists(dest):
            raise ISError(u"Image destination already exists: %s" % dest)
        # some display
        arrow(u"Downloading image in %s" % directory)
        debug(u"Downloading %s from %s" % (self.filename, self.path))
        # open source
        fs = PipeFile(self.path, progressbar=True)
        # check if announced file size is good
        if fs.size is not None and self.size != fs.size:
            raise ISError(u"Downloading image %s failed: Invalid announced size" % self.name)
        # open destination (write inside the requested directory)
        fd = open(dest, "wb")
        fs.consume(fd)
        fs.close()
        fd.close()
        if self.size != fs.consumed_size:
            raise ISError(u"Download image %s failed: Invalid size" % self.name)
        if self.md5 != fs.md5:
            raise ISError(u"Download image %s failed: Invalid MD5" % self.name)
    if payload:
        for payname in self.payload:
            arrow(u"Downloading payload %s in %s" % (payname, directory))
            self.payload[payname].info
            self.payload[payname].download(directory, force=force)
def clean(self, force=False):
    '''
    Clean the repository's content
    '''
    # check if the repo is local
    if not self.local:
        raise ISError(u"Repository must be local")
    allmd5 = set(self.getallmd5())
    repofiles = set(listdir(self.config.path)) - set([self.config.dbname,
                                                      self.config.lastname])
    dirtyfiles = repofiles - allmd5
    if len(dirtyfiles) > 0:
        # print dirty files
        arrow("Dirty files:")
        for f in dirtyfiles:
            arrow(f, 1)
        # ask confirmation
        if not force and not confirm("Remove dirty files? (yes) "):
            raise ISError(u"Aborted!")
        # start cleaning
        arrow("Cleaning")
        for f in dirtyfiles:
            p = join(self.config.path, f)
            arrow(u"Removing %s" % p, 1)
            try:
                if isdir(p):
                    rmdir(p)
                else:
                    unlink(p)
            except:
                warn(u"Removing %s failed" % p)
    else:
        arrow("Nothing to clean")
def get(self, name, version=None):
    '''
    Return an image from a name and version
    '''
    # if no version is given, take the last one
    if version is None:
        version = self.last(name)
        if version is None:
            raise ISError(u"Unable to find image %s in %s" % (name,
                                                              self.config.name))
    # get file md5 from db
    r = self.db.ask("select md5 from image where name = ? and version = ? limit 1",
                    (name, version)).fetchone()
    if r is None:
        raise ISError(u"Unable to find image %s v%s in %s" % (name, version,
                                                              self.config.name))
    path = join(self.config.path, r[0])
    # getting the file
    arrow(u"Loading image %s v%s from repository %s" % (name,
                                                        version,
                                                        self.config.name))
    memfile = StringIO()
    try:
        fo = PipeFile(path, "r")
        fo.consume(memfile)
        fo.close()
    except Exception as e:
        raise ISError(u"Loading image %s v%s failed" % (name, version), e)
    memfile.seek(0)
    pkg = PackageImage(path, fileobj=memfile, md5name=True)
    if pkg.md5 != r[0]:
        raise ISError(u"Image MD5 verification failure")
    return pkg
def search_image(self, pattern):
    '''
    Search pattern across all registered repositories
    '''
    for repo in self.onlines:
        arrow(self[repo].config.name)
        self[repo].search(pattern)
def render_templates(target, context, tpl_ext=".istpl", force=False, keep=False):
    '''
    Render templates according to tpl_ext
    Apply template mode/uid/gid to the generated file
    '''
    for path in os.walk(target):
        for filename in path[2]:
            name, ext = os.path.splitext(filename)
            if ext == tpl_ext:
                tpl_path = os.path.join(path[0], filename)
                file_path = os.path.join(path[0], name)
                arrow(tpl_path)
                if os.path.exists(file_path) and not force:
                    raise Exception(u"%s will be overwritten, cancel template "
                                    "generation (set force=True if you know "
                                    "what you do)" % file_path)
                try:
                    with open(tpl_path) as tpl_file:
                        template = jinja2.Template(tpl_file.read())
                        # use codecs to properly write unicode chars in file
                        with codecs.open(file_path, 'w', encoding='utf-8') as rendered_file:
                            rendered_file.write(template.render(context))
                            # add newline at the end of the file, because the
                            # jinja2 parser eats it and there is no option to
                            # keep it until jinja2 2.7
                            # https://github.com/mitsuhiko/jinja2/pull/170
                            rendered_file.write(os.linesep)
                except Exception as e:
                    raise Exception(u"Render template fail", e)
                st = os.stat(tpl_path)
                os.chown(file_path, st.st_uid, st.st_gid)
                os.chmod(file_path, st.st_mode)
                if not keep:
                    os.unlink(tpl_path)
def diff(repo1, repo2):
    '''
    Compute a diff between two repositories
    '''
    arrow(u"Diff between repositories #y#%s#R# and #g#%s#R#" % (repo1.config.name,
                                                                repo2.config.name))
    # get info from databases
    i_dict1 = dict((b[0], b[1:]) for b in repo1.db.ask(
        "SELECT md5, name, version FROM image").fetchall())
    i_set1 = set(i_dict1.keys())
    i_dict2 = dict((b[0], b[1:]) for b in repo2.db.ask(
        "SELECT md5, name, version FROM image").fetchall())
    i_set2 = set(i_dict2.keys())
    p_dict1 = dict((b[0], b[1:]) for b in repo1.db.ask(
        "SELECT md5, name FROM payload").fetchall())
    p_set1 = set(p_dict1.keys())
    p_dict2 = dict((b[0], b[1:]) for b in repo2.db.ask(
        "SELECT md5, name FROM payload").fetchall())
    p_set2 = set(p_dict2.keys())
    # computing diff
    i_only1 = i_set1 - i_set2
    i_only2 = i_set2 - i_set1
    p_only1 = p_set1 - p_set2
    p_only2 = p_set2 - p_set1
    # printing functions
    pimg = lambda r, c, m, d: out("#%s#Image only in repository %s: %s v%s (%s)#R#" %
                                  (c, r.config.name, d[m][0], d[m][1], m))
    ppay = lambda r, c, m, d: out("#%s#Payload only in repository %s: %s (%s)#R#" %
                                  (c, r.config.name, d[m][0], m))
    # printing image diff
    for md5 in i_only1:
        pimg(repo1, "y", md5, i_dict1)
    for md5 in p_only1:
        ppay(repo1, "y", md5, p_dict1)
    for md5 in i_only2:
        pimg(repo2, "g", md5, i_dict2)
    for md5 in p_only2:
        ppay(repo2, "g", md5, p_dict2)
def render_templates(target, context, tpl_ext=".istpl", force=False, keep=False):
    '''
    Render templates according to tpl_ext
    Apply template mode/uid/gid to the generated file
    '''
    for path in walk(target):
        for filename in path[2]:
            name, ext = splitext(filename)
            if ext == tpl_ext:
                tpl_path = join(path[0], filename)
                file_path = join(path[0], name)
                arrow(tpl_path)
                if exists(file_path) and not force:
                    raise ISError(u"%s will be overwritten, cancel template "
                                  "generation (set force=True if you know "
                                  "what you do)" % file_path)
                try:
                    with open(tpl_path) as tpl_file:
                        template = Template(tpl_file.read())
                    with open(file_path, "w") as rendered_file:
                        rendered_file.write(template.render(context))
                except Exception as e:
                    raise ISError(u"Render template fail", e)
                st = stat(tpl_path)
                chown(file_path, st.st_uid, st.st_gid)
                chmod(file_path, st.st_mode)
                if not keep:
                    unlink(tpl_path)
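# Hypothetical usage sketch (not part of the project): the target directory
# and context values below are made-up examples. Given a tree that contains
# e.g. "etc/motd.istpl", render_templates() writes "etc/motd" with the
# template's uid/gid/mode and removes the .istpl source unless keep=True.
def _example_render_templates():
    context = {"hostname": "node01", "release": "1.0"}
    # render every *.istpl under the build root, overwriting existing files
    render_templates("/tmp/build/root", context, force=True)
    # same, but keep the .istpl sources next to the rendered files
    render_templates("/tmp/build/root", context, force=True, keep=True)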
def add_scripts(self, tarball, directory):
    '''
    Add scripts inside a directory into a tarball
    '''
    basedirectory = basename(directory)
    arrow(u"Add %s scripts" % basedirectory)
    arrowlevel(1)
    # adding base directory
    ti = tarball.gettarinfo(directory, arcname=basedirectory)
    ti.mode = 0755
    ti.uid = ti.gid = 0
    ti.uname = ti.gname = ""
    tarball.addfile(ti)
    # adding each file
    for fp, fn, fc in self.select_scripts(directory):
        # check input unicode stuff
        assert(isinstance(fp, unicode))
        assert(isinstance(fn, unicode))
        assert(isinstance(fc, str))
        # add file into tarball
        tarball.add_str(join(basedirectory, fn), fc, REGTYPE, 0755,
                        int(stat(fp).st_mtime))
        arrow(u"%s added" % fn)
    arrowlevel(-1)
def cat(self, filename):
    '''
    Display the content of files matching filename inside the tarball
    '''
    filelist = self._tarball.getnames(glob_pattern=filename, dir=False)
    if len(filelist) == 0:
        warn(u"No file matching %s" % filename)
    for filename in filelist:
        arrow(filename)
        out(self._tarball.get_utf8(filename))
def setmotd(self, value=""):
    '''
    Set repository message of the day
    '''
    # check local repository
    if not self.local:
        raise ISError(u"Repository must be local")
    arrow("Updating motd")
    self.db.ask("UPDATE repository SET motd = ?", (value,))
    self.update_last()
def remove_payloads(self, paylist):
    '''
    Remove the payloads listed in paylist if they exist
    '''
    arrow("Removing payloads")
    for pay in paylist:
        arrow(pay, 1)
        desc = self.describe_payload(pay)
        for f in (desc["dest_path"], desc["link_path"]):
            if lexists(f):
                unlink(f)
def rm(path):
    '''
    Remove path inside the target directory (namespace.target),
    ignoring missing files
    '''
    path = os.path.normpath(path)
    if path.startswith('/'):
        unlink_path = os.path.join(namespace.target, path[1:])
    else:
        unlink_path = os.path.join(namespace.target, path)
    try:
        os.unlink(unlink_path)
        arrow('%s removed' % path)
    except OSError:
        pass
def chroot(path, shell="/bin/bash", mount=True):
    '''
    Chroot inside a directory and call shell
    if mount is true, mount /{proc,dev,sys} inside the chroot
    '''
    # prepare to chroot
    prepare_chroot(path, mount)
    # chrooting
    arrow(u"Chrooting inside %s and running %s" % (path, shell))
    call(["chroot", path, shell], close_fds=True)
    # revert preparation of chroot
    unprepare_chroot(path, mount)
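# Hypothetical usage sketch (not part of the project): the target path below
# is a made-up example. With mount=True, the /proc, /dev and /sys
# pseudo-filesystems are set up inside the target before the shell starts
# and reverted once the shell exits.
def _example_chroot():
    # open an interactive bash inside the freshly installed system
    chroot("/mnt/target")
    # run a plain sh without touching /proc, /dev and /sys
    chroot("/mnt/target", shell="/bin/sh", mount=False)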
def update_last(self):
    '''
    Update last file to current time
    '''
    # check local repository
    if not self.local:
        raise ISError(u"Repository must be local")
    try:
        arrow("Updating last file")
        last_path = join(self.config.path, self.config.lastname)
        open(last_path, "w").write("%s\n" % int(time()))
        chrights(last_path, self.config.uid, self.config.gid, self.config.fmod)
    except Exception as e:
        raise ISError(u"Update last file failed", e)
def delete(self, name, version, payloads=True):
    '''
    Delete an image from repository
    '''
    # check local repository
    if not self.local:
        raise ISError(u"Repository deletion must be local")
    # get md5 of files related to the image (an exception is raised if it does not exist)
    md5s = self.getmd5(name, version)
    # cleaning db (must be done before removing files)
    arrow("Cleaning database")
    arrow("Remove payloads from database", 1)
    self.db.begin()
    for md5 in md5s[1:]:
        self.db.ask("DELETE FROM payload WHERE md5 = ? AND image_md5 = ?",
                    (md5, md5s[0])).fetchone()
    arrow("Remove image from database", 1)
    self.db.ask("DELETE FROM image WHERE md5 = ?",
                (md5s[0],)).fetchone()
    self.db.commit()
    # removing files
    arrow("Removing files from pool")
    # if asked, don't remove payloads
    if not payloads:
        md5s = [ md5s[0] ]
    arrowlevel(1)
    for md5 in md5s:
        self._remove_file(md5)
    arrowlevel(-1)
    # update last file
    self.update_last()
def _remove_file(self, filename):
    '''
    Remove a filename from pool. Check if it's not needed by db before
    '''
    # check existence in image and payload tables
    have = False
    for table in ("image", "payload"):
        have = have or self.db.ask(u"SELECT md5 FROM %s WHERE md5 = ? LIMIT 1" % table,
                                   (filename,)).fetchone() is not None
    # if no reference, delete!
    if not have:
        arrow(u"%s, deleted" % filename)
        unlink(join(self.config.path, filename))
    else:
        arrow(u"%s, skipped" % filename)
def search(self, pattern):
    '''
    Search pattern in a repository
    '''
    images = self.db.ask("SELECT name, version, author, description\
                          FROM image\
                          WHERE name LIKE ? OR\
                          description LIKE ? OR\
                          author LIKE ?",
                         tuple([u"%%%s%%" % pattern] * 3)).fetchall()
    for name, version, author, description in images:
        arrow(u"%s v%s" % (name, version), 1)
        out(u"  #yellow#Author:#reset# %s" % author)
        out(u"  #yellow#Description:#reset# %s" % description)
def parse_changelog(self):
    '''
    Create a changelog object from a file
    '''
    # try to find a changelog file
    try:
        path = join(self.base_path, "changelog")
        fo = codecs.open(path, "r", "utf8")
    except IOError:
        return None
    # we have it, we need to check everything is ok
    arrow("Parsing changelog")
    try:
        cl = Changelog(fo.read())
    except Exception as e:
        raise ISError(u"Bad changelog", e)
    return cl
def check_scripts(self, directory):
    '''
    Check if scripts inside a directory can be compiled
    '''
    basedirectory = basename(directory)
    arrow(u"Checking %s scripts" % basedirectory)
    arrowlevel(1)
    # checking each file
    for fp, fn, fc in self.select_scripts(directory):
        # check input unicode stuff
        assert(isinstance(fp, unicode))
        assert(isinstance(fn, unicode))
        assert(isinstance(fc, str))
        arrow(fn)
        try:
            compile(fc, fn.encode(getpreferredencoding()), "exec")
        except SyntaxError as e:
            raise ISError(exception=e)
    arrowlevel(-1)
def check(self, message="Check MD5"):
    '''
    Check that the size and md5 of the tarballs are correct
    Each tarball is downloaded from its path and compared with the stored values
    '''
    arrow(message)
    arrowlevel(1)
    # check image
    fo = PipeFile(self.path, "r")
    fo.consume()
    fo.close()
    if self.size != fo.read_size:
        raise ISError(u"Invalid size of image %s" % self.name)
    if self.md5 != fo.md5:
        raise ISError(u"Invalid MD5 of image %s" % self.name)
    # check payloads
    for pay_name, pay_obj in self.payload.items():
        arrow(pay_name)
        pay_obj.check()
    arrowlevel(-1)
def run(self, parser, extparser, load_modules=True, run_parser=True,
        run_setup=True):
    '''
    Run images scripts

    parser is the whole command line parser
    extparser is the parser extensible by parser scripts

    if load_modules is true, load image modules
    if run_parser is true, run parser scripts
    if run_setup is true, run setup scripts
    '''
    # register start time
    t0 = time()
    # load image modules
    if load_modules:
        self.load_modules(lambda: self.select_scripts("lib"))
    # run parser scripts to extend extparser
    # those scripts should only extend the parser or produce error
    if run_parser:
        self.run_scripts("parser", lambda: self.select_scripts("parser"),
                         "/", {"parser": extparser})
    # call parser (again), with full options
    arrow("Parsing command line")
    # encode command line arguments to utf-8
    args = argv()[1:]
    # catch exception in custom argparse action
    try:
        args = parser.parse_args(args=args)
    except Exception as e:
        raise ISError("Argument parser", e)
    # run setup scripts
    if run_setup:
        self.run_scripts("setup", lambda: self.select_scripts("setup"),
                         "/", {"namespace": args})
    # return the building time
    return int(time() - t0)
def add(self, image, delete=False):
    '''
    Add a packaged image to repository
    if delete is true, remove original files
    '''
    # check local repository
    if not self.local:
        raise ISError(u"Repository addition must be local")
    # cannot add an already existing image
    if self.has(image.name, image.version):
        raise ISError(u"Image already in database, delete first!")
    # adding file to repository
    arrow("Copying images and payload")
    for obj in [ image ] + image.payload.values():
        dest = join(self.config.path, obj.md5)
        basesrc = basename(obj.path)
        if exists(dest):
            arrow(u"Skipping %s: already exists" % basesrc, 1)
        else:
            arrow(u"Adding %s (%s)" % (basesrc, obj.md5), 1)
            dfo = open(dest, "wb")
            sfo = PipeFile(obj.path, "r", progressbar=True)
            sfo.consume(dfo)
            sfo.close()
            dfo.close()
            chrights(dest, self.config.uid, self.config.gid, self.config.fmod)
    # copy is done. create an image inside repo
    r_image = PackageImage(join(self.config.path, image.md5), md5name=True)
    # checking must be done with original md5
    r_image.md5 = image.md5
    # checking image and payload after copy
    r_image.check("Check image and payload")
    self._add(image)
    # removing original files
    if delete:
        arrow("Removing original files")
        for obj in [ image ] + image.payload.values():
            arrow(basename(obj.path), 1)
            unlink(obj.path)
def load_modules(self, select_scripts):
    '''
    Load all modules selected by the select_scripts generator

    select_scripts is a generator which returns tuples (fp, fn, fc) where:
      fp is the unicode file path of the module
      fn is the unicode file name of the module (basename)
      fc is the file content (str)
    '''
    arrow(u"Load lib scripts")
    old_level = arrowlevel(1)
    self.modules = {}
    for fp, fn, fc in select_scripts():
        # check input unicode stuff
        assert(isinstance(fp, unicode))
        assert(isinstance(fn, unicode))
        assert(isinstance(fc, str))
        arrow(fn)
        module_name = splitext(fn.split('-', 1)[1])[0]
        self.modules[module_name] = self._load_module(module_name, fp, fc)
    arrowlevel(level=old_level)
def _add(self, image):
    '''
    Add description to db
    '''
    arrow("Adding metadata")
    self.db.begin()
    # insert image information
    arrow("Image", 1)
    self.db.ask("INSERT INTO image values (?,?,?,?,?,?,?,?,?)",
                (image.md5,
                 image.name,
                 image.version,
                 image.date,
                 image.author,
                 image.description,
                 image.size,
                 image.is_min_version,
                 image.format,
                 ))
    # insert payload information
    arrow("Payloads", 1)
    for name, obj in image.payload.items():
        self.db.ask("INSERT INTO payload values (?,?,?,?,?)",
                    (obj.md5,
                     image.md5,
                     name,
                     obj.isdir,
                     obj.size,
                     ))
    # commit changes
    self.db.commit()
    # update last file
    self.update_last()
def init(self):
    '''
    Initialize an empty base repository
    '''
    config = self.config
    # check local repository
    if not self.local:
        raise ISError(u"Repository creation must be local")
    # create base directories
    arrow("Creating base directories")
    arrowlevel(1)
    # creating local directory
    try:
        if exists(config.path):
            arrow(u"%s already exists" % config.path)
        else:
            mkdir(config.path, config.uid, config.gid, config.dmod)
            arrow(u"%s directory created" % config.path)
    except Exception as e:
        raise ISError(u"Unable to create directory %s" % config.path, e)
    arrowlevel(-1)
    # create database
    d = Database.create(config.dbpath)
    chrights(config.dbpath, uid=config.uid,
             gid=config.gid, mode=config.fmod)
    # load database
    self.db = Database(config.dbpath)
    # mark repo as not offline
    self.config.offline = False
    # create/update last file
    self.update_last()
def check(self):
    '''
    Check repository for unreferenced and missing files
    '''
    # check if the repo is local
    if not self.local:
        raise ISError(u"Repository must be local")
    local_files = set(listdir(self.config.path))
    local_files.remove(self.config.dbname)
    local_files.remove(self.config.lastname)
    db_files = set(self.getallmd5())
    # check missing files
    arrow("Checking missing files")
    missing_files = db_files - local_files
    if len(missing_files) > 0:
        out(linesep.join(missing_files))
    # check unreferenced files
    arrow("Checking unreferenced files")
    unref_files = local_files - db_files
    if len(unref_files) > 0:
        out(linesep.join(unref_files))
    # check corruption of local files
    arrow("Checking corrupted files")
    for f in local_files:
        fo = PipeFile(join(self.config.path, f))
        fo.consume()
        fo.close()
        if fo.md5 != f:
            out(f)
def parse_description(self):
    '''
    Raise an exception if the description file is invalid and return vars to include
    '''
    arrow("Parsing description")
    d = dict()
    try:
        descpath = join(self.base_path, "description")
        cp = ConfigObj(descpath,
                       configspec=DESCRIPTION_CONFIG_SPEC.splitlines(),
                       encoding="utf8", file_error=True)
        res = cp.validate(Validator({"IS_name": Image.check_name,
                                     "IS_version": Image.check_version,
                                     "IS_min_version": Image.check_min_version}),
                          preserve_errors=True)
        # if everything is fine, the validation returns True
        # else, it returns a list of (section, optname, error)
        if res is not True:
            for section, optname, error in flatten_errors(cp, res):
                # if error is False, no value has been supplied,
                # so we use the default value
                # else, the check has failed and we stop on the first failure
                if error:
                    raise ISError('Wrong description file, %s %s: %s'
                                  % (section, optname, error))
        for n in ("name", "version", "description", "author", "is_min_version"):
            d[n] = cp["image"][n]
        d["compressor"] = {}
        # set payload compressor
        d["compressor"]["patterns"] = cp["compressor"].items()
        if not d["compressor"]["patterns"]:
            d["compressor"]["patterns"] = [(Image.default_compressor, "*")]
        for compressor, patterns in cp["compressor"].items():
            # is a valid compressor?
            get_compressor_path(compressor)
            for pattern in patterns:
                for payname in fnmatch.filter(self.select_payloads(), pattern):
                    d["compressor"][payname] = compressor
    except Exception as e:
        raise ISError(u"Bad description", e)
    return d
def diff(cls, pkg1, pkg2):
    '''
    Diff two packaged images
    '''
    arrow(u"Difference from images #y#%s v%s#R# to #r#%s v%s#R#:" % (pkg1.name,
                                                                     pkg1.version,
                                                                     pkg2.name,
                                                                     pkg2.version))
    # extract images for diff scripts files
    fromfiles = set(pkg1._tarball.getnames(re_pattern="(parser|setup)/.*"))
    tofiles = set(pkg2._tarball.getnames(re_pattern="(parser|setup)/.*"))
    for f in fromfiles | tofiles:
        # preparing from info
        if f in fromfiles:
            fromfile = join(pkg1.filename, f)
            fromdata = pkg1._tarball.extractfile(f).readlines()
        else:
            fromfile = "/dev/null"
            fromdata = ""
        # preparing to info
        if f in tofiles:
            tofile = join(pkg2.filename, f)
            todata = pkg2._tarball.extractfile(f).readlines()
        else:
            tofile = "/dev/null"
            todata = ""
        # generate diff
        for line in unified_diff(fromdata, todata,
                                 fromfile=fromfile, tofile=tofile):
            # coloring diff
            if line.startswith("+"):
                out(u"#g#%s#R#" % line, endl="")
            elif line.startswith("-"):
                out(u"#r#%s#R#" % line, endl="")
            elif line.startswith("@@"):
                out(u"#c#%s#R#" % line, endl="")
            else:
                out(line, endl="")