def init(self):
    '''
    Initialize an empty base repository

    Creates the repository directory, an empty database and the last file.
    :raises ISError: if the repository is not local or creation fails
    '''
    config = self.config
    # creation only makes sense on a local repository
    if not self.local:
        raise ISError(u"Repository creation must be local")
    # create base directories
    arrow("Creating base directories")
    arrowlevel(1)
    # creating local directory
    try:
        if exists(config.path):
            arrow(u"%s already exists" % config.path)
        else:
            mkdir(config.path, config.uid, config.gid, config.dmod)
            arrow(u"%s directory created" % config.path)
    except Exception as e:
        raise ISError(u"Unable to create directory %s" % config.path, e)
    arrowlevel(-1)
    # create database (return value unused — the db is reopened below)
    Database.create(config.dbpath)
    chrights(config.dbpath, uid=config.uid, gid=config.gid,
             mode=config.fmod)
    # load database
    self.db = Database(config.dbpath)
    # mark repo as not offline
    self.config.offline = False
    # create/update last file
    self.update_last()
def extract_file(self, dest, force=False):
    '''
    Copy a payload directly to a file

    The payload is decompressed on the fly and its announced size and md5
    are verified against what was actually read.

    :param dest: destination file path, or a directory in which a file
                 named after the payload is created
    :param force: overwrite an existing destination file
    :raises ISError: on any open/size/md5/compressor failure
    '''
    # if dest is a directory try to create file inside
    if isdir(dest):
        dest = join(dest, self.name)
    # try to create leading directories
    # fix: dirname() is "" for a bare filename — nothing to create then,
    # and mkdir("") would raise
    elif dirname(dest) and not exists(dirname(dest)):
        mkdir(dirname(dest))
    # check validity of dest
    if exists(dest):
        if isdir(dest):
            raise ISError(u"Destination %s is a directory" % dest)
        if not force:
            raise ISError(u"File %s already exists" % dest)
    # get compressor argv (first to escape file creation if not found)
    a_comp = get_compressor_path(self.compressor, compress=False)
    # try to open payload file (source)
    try:
        f_src = PipeFile(self.path, "r", progressbar=True)
    except Exception as e:
        raise ISError(u"Unable to open payload file %s" % self.path, e)
    # check if announced file size is good
    if f_src.size is not None and self.size != f_src.size:
        raise ISError(u"Invalid announced size on %s" % self.path)
    # opening destination
    try:
        f_dst = open(dest, "wb")
    except Exception as e:
        raise ISError(u"Unable to open destination file %s" % dest, e)
    # run compressor process; its stdout goes straight to the destination
    p_comp = Popen(a_comp, shell=False, close_fds=True,
                   stdin=PIPE, stdout=f_dst)
    # close our copy of the destination fd (the child keeps its own)
    f_dst.close()
    # push data into compressor
    f_src.consume(p_comp.stdin)
    # closing source fo
    f_src.close()
    # checking download size (read_size/md5 stay valid after close)
    if self.size != f_src.read_size:
        raise ISError("Invalid size")
    # checking downloaded md5
    if self.md5 != f_src.md5:
        raise ISError("Invalid MD5")
    # close compressor pipe so the child sees EOF and exits
    p_comp.stdin.close()
    # check compressor return 0
    if p_comp.wait() != 0:
        raise ISError(u"Compressor %s return is not zero" % a_comp[0])
    # settings file orginal rights
    chrights(dest, self.uid, self.gid, self.mode, self.mtime)
def update_last(self):
    '''
    Update last file to current time

    The last file holds the epoch timestamp of the repository's last
    modification.
    :raises ISError: if the repository is not local or the write fails
    '''
    # check local repository
    if not self.local:
        raise ISError(u"Repository must be local")
    try:
        arrow("Updating last file")
        last_path = os.path.join(self.config.path, self.config.lastname)
        # use a context manager so the file is closed deterministically
        # even if the write fails
        with open(last_path, "w") as f:
            f.write("%s\n" % int(time.time()))
        istools.chrights(last_path, self.config.uid,
                         self.config.gid, self.config.fmod)
    except Exception as e:
        raise ISError(u"Update last file failed", e)
def add(self, image, delete=False):
    '''
    Add a packaged image to repository
    if delete is true, remove original files

    :param image: packaged image to add
    :param delete: remove the source files after a successful add
    :raises ISError: if the repo is not local or the image already exists
    '''
    # check local repository
    if not self.local:
        raise ISError(u"Repository addition must be local")
    # cannot add already existant image
    if self.has(image.name, image.version):
        raise ISError(u"Image already in database, delete first!")
    # adding file to repository
    arrow("Copying images and payload")
    for obj in [image] + image.payload.values():
        # repository files are named by their md5, not their basename
        dest = join(self.config.path, obj.md5)
        basesrc = basename(obj.path)
        if exists(dest):
            arrow(u"Skipping %s: already exists" % basesrc, 1)
        else:
            arrow(u"Adding %s (%s)" % (basesrc, obj.md5), 1)
            # close both ends deterministically, even if consume fails
            with open(dest, "wb") as dfo:
                sfo = PipeFile(obj.path, "r", progressbar=True)
                try:
                    sfo.consume(dfo)
                finally:
                    sfo.close()
            chrights(dest, self.config.uid,
                     self.config.gid, self.config.fmod)
    # copy is done. create a image inside repo
    r_image = PackageImage(join(self.config.path, image.md5),
                           md5name=True)
    # checking must be done with original md5
    r_image.md5 = image.md5
    # checking image and payload after copy
    r_image.check("Check image and payload")
    self._add(image)
    # removing orginal files
    if delete:
        arrow("Removing original files")
        for obj in [image] + image.payload.values():
            arrow(basename(obj.path), 1)
            unlink(obj.path)
def create(cls, path, force=False):
    '''
    Create an empty source image

    Builds the source image directory layout and populates it with
    example files generated from templates.

    :param path: destination path of the new source image (must be local)
    :param force: overwrite existing example files
    :raises ISError: on directory/file creation or chmod failure
    '''
    # check local repository
    if not isfile(path):
        raise NotImplementedError("SourceImage must be local")
    # main path
    build_path = join(path, "build")
    parser_path = join(path, "parser")
    setup_path = join(path, "setup")
    payload_path = join(path, "payload")
    lib_path = join(path, "lib")
    # create base directories
    arrow("Creating base directories")
    try:
        for d in (path, build_path, parser_path, setup_path,
                  payload_path, lib_path):
            if not exists(d) or not isdir(d):
                mkdir(d)
    except Exception as e:
        raise ISError(u"Unable to create directory: %s" % d, e)
    # create example files
    arrow("Creating examples")
    arrowlevel(1)
    # create dict of file to create
    examples = {}
    # create description example from template
    examples["description"] = {
        "path": "description",
        "content": DESCRIPTION_TPL % {
            "name": "",
            "version": "1",
            "description": "",
            "author": "",
            "is_min_version": VERSION,
            "compressor": "gzip = *\nnone = *.gz, *.bz2, *.xz"}
    }
    # create changelog example from template
    examples["changelog"] = {"path": "changelog",
                             "content": CHANGELOG_TPL}
    # create build example from template
    examples["build"] = {"path": "build/01-build.py",
                         "content": BUILD_TPL}
    # create parser example from template
    examples["parser"] = {"path": "parser/01-parser.py",
                          "content": PARSER_TPL}
    # create setup example from template
    examples["setup"] = {"path": "setup/01-setup.py",
                         "content": SETUP_TPL}
    for name in examples:
        try:
            arrow(u"Creating %s example" % name)
            expath = join(path, examples[name]["path"])
            if not force and exists(expath):
                warn(u"%s already exists. Skipping!" % expath)
                continue
            # context manager closes the handle even on a short write
            with open(expath, "w") as f:
                f.write(examples[name]["content"])
        except Exception as e:
            raise ISError(u"Unable to create example file", e)
    try:
        # setting executable rights on files in setup and parser
        arrow("Setting executable rights on scripts")
        # umask(0) then restore: the only portable way to read the umask
        oldmask = umask(0)
        umask(oldmask)
        for dpath in (build_path, parser_path, setup_path):
            for f in listdir(dpath):
                # 0o777 spelling works on both python 2.6+ and 3
                chrights(join(dpath, f), mode=0o777 & ~oldmask)
    except Exception as e:
        raise ISError(u"Unable to set rights", e)
    arrowlevel(-1)
def _cachify(self, config, temp=False, nosync=False):
    '''
    Return a config of a cached repository from an original config file

    :param config: repository configuration
    :param temp: repository db should be stored in a temporary location
    :param nosync: if a cache exists, don't try to update it
    '''
    # if cache is disabled => temp = True
    if self.cache_path is None:
        temp = True
    try:
        original_dbpath = config.dbpath
        if temp and nosync:
            # no cache location and no sync allowed: nothing usable
            raise ISError("sync is disabled")
        elif temp:
            # this is a temporary cached repository
            tempfd, config.dbpath = tempfile.mkstemp()
            os.close(tempfd)
            self.tempfiles.append(config.dbpath)
        else:
            config.dbpath = os.path.join(self.cache_path, config.name)
        if not nosync:
            # Open remote database
            rdb = PipeFile(original_dbpath, timeout=self.timeout)
            # get remote last modification; when the transport gives no
            # mtime, fall back on the repository's last file
            if rdb.mtime is None:
                try:
                    rlast = int(PipeFile(config.lastpath, mode='r',
                                         timeout=self.timeout
                                         ).read().strip())
                except ISError:
                    rlast = -1
            else:
                rlast = rdb.mtime
            # get local last value (-2 guarantees a first download)
            if os.path.exists(config.dbpath):
                llast = int(os.stat(config.dbpath).st_mtime)
            else:
                llast = -2
            # if repo is out of date, download it
            if rlast != llast:
                try:
                    arrow(u"Downloading %s" % original_dbpath)
                    rdb.progressbar = True
                    # fix: close the local db file even if consume fails
                    with open(config.dbpath, "wb") as ldb:
                        rdb.consume(ldb)
                    rdb.close()
                    istools.chrights(config.dbpath,
                                     uid=config.uid,
                                     gid=config.gid,
                                     mode=config.fmod,
                                     mtime=rlast)
                except:
                    # deliberate bare except: drop the partial download,
                    # then re-raise whatever went wrong
                    if os.path.exists(config.dbpath):
                        os.unlink(config.dbpath)
                    raise
    except ISError as e:
        # if anything went bad during caching, mark the repo as offline
        debug(u"Unable to cache repository %s: %s" % (config.name, e))
        config.offline = True
    return self.factory.create(config)