def publish_package(self):
    """This method is called by the server to publish a package.

    It moves the files associated with the transaction into the
    appropriate position in the server repository.  Callers shall
    supply a fmri, config, and transaction in fmri, cfg, and trans,
    respectively."""

    cfg = self.cfg

    pkg_name = self.fmri.pkg_name
    pkgdir = os.path.join(cfg.pkg_root, urllib.quote(pkg_name, ""))

    # If the directory isn't there, create it.
    if not os.path.exists(pkgdir):
        os.makedirs(pkgdir)

    # mv manifest to pkg_name / version
    # A package may have no files, so there needn't be a manifest.
    mpath = os.path.join(self.dir, "manifest")
    if os.path.exists(mpath):
        portable.rename(mpath, os.path.join(pkgdir,
            urllib.quote(str(self.fmri.version), "")))

    # Move each file to file_root, with appropriate directory
    # structure.
    for f in os.listdir(self.dir):
        path = misc.hash_file_name(f)
        src_path = os.path.join(self.dir, f)
        dst_path = os.path.join(cfg.file_root, path)
        try:
            portable.rename(src_path, dst_path)
        except OSError, e:
            # XXX We might want to be more careful with this
            # exception, and only try makedirs() if rename()
            # failed because the directory didn't exist.
            #
            # I'm not sure it matters too much, except that
            # if makedirs() fails, we'll see that exception,
            # rather than the original one from rename().
            #
            # Interestingly, rename() failing due to missing
            # path component fails with ENOENT, not ENOTDIR
            # like rename(2) suggests (6578404).
            try:
                os.makedirs(os.path.dirname(dst_path))
            except OSError, e:
                if e.errno != errno.EEXIST:
                    raise
            portable.rename(src_path, dst_path)
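# misc.hash_file_name() is used throughout these routines to turn a
# content hash into a fanned-out relative path, so that no single
# directory under file_root accumulates an unbounded number of entries.
# A minimal sketch of that idea follows; the two-level fanout widths
# here are an assumption for illustration, not the actual pkg(5) layout.
import os

def hash_file_name(fhash):
    """Map a hash string to a nested relative path.

    >>> hash_file_name("f1ab23c4d5")
    'f1/ab/f1ab23c4d5'
    """
    return os.path.join(fhash[0:2], fhash[2:4], fhash)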
def file(self, fhash):
    """Returns the absolute pathname of the file specified by the
    provided SHA-1 hash name."""

    self.scfg.inc_file()

    if fhash is None:
        raise RepositoryFileNotFoundError(fhash)

    try:
        return os.path.normpath(os.path.join(
            self.scfg.file_root, misc.hash_file_name(fhash)))
    except EnvironmentError, e:
        if e.errno == errno.ENOENT:
            raise RepositoryFileNotFoundError(fhash)
        raise
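# A small usage sketch: a caller (a depot request handler, say) that
# streams repository content by hash using file() above.  The repo and
# out names are assumptions for illustration, not part of the interface
# shown above.
def send_file(repo, fhash, out, bufsz=64 * 1024):
    """Look up fhash in the repository and copy its bytes to out."""
    path = repo.file(fhash)  # may raise RepositoryFileNotFoundError
    f = open(path, "rb")
    try:
        while True:
            chunk = f.read(bufsz)
            if not chunk:
                break
            out.write(chunk)
    finally:
        f.close()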
def _action_cached(self, action):
    """If a file with the name action.hash is cached, and if it has
    the same content hash as action.chash, then return the path to
    the file.  If the file can't be found, return None."""

    hashval = action.hash
    cache_path = os.path.normpath(os.path.join(
        self.__img.cached_download_dir(), misc.hash_file_name(hashval)))

    try:
        if os.path.exists(cache_path):
            self._verify_content(action, cache_path)
            return cache_path
    except tx.InvalidContentException:
        # If the content in the cache doesn't match the hash of
        # the action, verify will have already purged the item
        # from the cache.
        pass

    return None
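# _verify_content() is called above but not defined in this excerpt.
# Below is a minimal sketch of the check it performs, assuming the
# cached file is gzip-compressed and that action.hash is the SHA-1 of
# the uncompressed content; the real pkg(5) implementation differs in
# details such as the exception type (tx.InvalidContentException) and
# its payload.
import gzip
import hashlib
import os

class InvalidContent(Exception):
    """Stand-in for tx.InvalidContentException in this sketch."""

def verify_content(action, filepath, bufsz=64 * 1024):
    """Raise InvalidContent if filepath's uncompressed bytes don't
    hash to action.hash; purge the corrupt file from the cache first."""
    h = hashlib.sha1()
    f = gzip.open(filepath, "rb")
    try:
        while True:
            chunk = f.read(bufsz)
            if not chunk:
                break
            h.update(chunk)
    finally:
        f.close()
    if h.hexdigest() != action.hash:
        os.remove(filepath)  # keep corrupt entries out of the cache
        raise InvalidContent("content at %s failed verification" %
            filepath)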
# Verify each downloaded file and, if it checks out, move it into its
# hashed location under completed_dir.  (This runs once per request s
# inside the transport's download loop.)
dl_path = os.path.join(download_dir, s)

try:
    self._verify_content(mfile[s][0], dl_path)
except tx.InvalidContentException, e:
    mfile.subtract_progress(e.size)
    e.request = s
    repostats.record_error()
    failedreqs.append(s)
    failures.append(e)
    if not filelist:
        filelist = failedreqs
    continue

final_path = os.path.normpath(os.path.join(completed_dir,
    misc.hash_file_name(s)))

finaldir = os.path.dirname(final_path)
self._makedirs(finaldir)
portable.rename(dl_path, final_path)

mfile.make_openers(s, final_path)

# After the loop: return if everything was successful.
if not filelist and len(errlist) == 0:
    return

# Otherwise, keep only the failures that correspond to requests which
# actually failed, and gather them for reporting.
if len(failedreqs) > 0 and len(failures) > 0:
    failures = filter(lambda x: x.request in failedreqs, failures)

    tfailurex = tx.TransportFailures()
    for f in failures:
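# _makedirs() above is not shown in this excerpt.  It is assumed to be
# the usual EEXIST-tolerant wrapper sketched below (the same pattern
# publish_package() uses inline); the real transport code may translate
# the failure into its own exception type.
import errno
import os

def _makedirs(path):
    """Create path and any missing parents; an existing path is fine."""
    try:
        os.makedirs(path)
    except OSError, e:
        if e.errno != errno.EEXIST:
            raise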
def add_content(self, action):
    """Adds the content of the provided action (if applicable) to
    the Transaction."""

    size = int(action.attrs.get("pkg.size", 0))

    if action.name in ("file", "license") and size <= 0:
        # XXX hack for empty files
        action.data = lambda: open(os.devnull, "rb")

    if action.data is not None:
        bufsz = 64 * 1024

        fname, data = misc.get_data_digest(action.data(),
            length=size, return_content=True)
        action.hash = fname

        # Extract ELF information.
        # XXX This needs to be modularized.
        if haveelf and data[:4] == "\x7fELF":
            elf_name = "%s/.temp" % self.dir
            elf_file = open(elf_name, "wb")
            elf_file.write(data)
            elf_file.close()
            try:
                elf_info = elf.get_info(elf_name)
            except elf.ElfError, e:
                raise TransactionContentError(e)
            try:
                elf_hash = elf.get_dynamic(elf_name)["hash"]
                action.attrs["elfhash"] = elf_hash
            except elf.ElfError:
                pass
            action.attrs["elfbits"] = str(elf_info["bits"])
            action.attrs["elfarch"] = elf_info["arch"]
            os.unlink(elf_name)

        # This check keeps us from storing a file in the depot that
        # is already present there, which takes CPU load off the
        # depot on large imports of mostly-the-same stuff.  In
        # general it saves disk bandwidth, and on ZFS in particular
        # it saves us space in differential snapshots.  We also need
        # to check that the destination is in the same compression
        # format as the source, as we must have properly formed
        # files for the chash/csize properties to work right.
        fpath = misc.hash_file_name(fname)
        dst_path = "%s/%s" % (self.cfg.file_root, fpath)
        fileneeded = True
        if os.path.exists(dst_path):
            if PkgGzipFile.test_is_pkggzipfile(dst_path):
                fileneeded = False
                opath = dst_path

        if fileneeded:
            opath = os.path.join(self.dir, fname)
            ofile = PkgGzipFile(opath, "wb")

            # Write the content out in bufsz-sized chunks.
            nbuf = size / bufsz
            for n in range(0, nbuf):
                l = n * bufsz
                h = (n + 1) * bufsz
                ofile.write(data[l:h])
            m = nbuf * bufsz
            ofile.write(data[m:])
            ofile.close()
            data = None

        # Now that the file has been compressed, determine its size
        # and store that as the pkg.csize attribute in the manifest
        # for the file.
        fs = os.stat(opath)
        action.attrs["pkg.csize"] = str(fs.st_size)

        # Compute the SHA-1 hash of the compressed file and store it
        # as the chash attribute of the file's action.  In order for
        # this to work correctly, we have to use the PkgGzipFile
        # class: it omits filename and timestamp information from
        # the gzip header, allowing us to generate deterministic
        # hashes for different files with identical content.
        cfile = open(opath, "rb")
        chash = sha.new()
        while True:
            cdata = cfile.read(bufsz)
            if cdata == "":
                break
            chash.update(cdata)
        cfile.close()
        action.attrs["chash"] = chash.hexdigest()
        cdata = None
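# PkgGzipFile is pkg(5)-specific, but the deterministic-compression
# idea described in the comment above can be sketched with the stdlib:
# omit the filename and pin the timestamp in the gzip header, so that
# identical input bytes always compress to identical output bytes and
# therefore to an identical chash.  This illustrates the concept only;
# it is not the PkgGzipFile file format.
import gzip
import hashlib
import io

def deterministic_chash(data):
    """Compress data reproducibly and return the SHA-1 of the result."""
    out = io.BytesIO()
    # filename="" and mtime=0 keep the gzip header content-independent.
    gz = gzip.GzipFile(filename="", mode="wb", fileobj=out, mtime=0)
    gz.write(data)
    gz.close()
    return hashlib.sha1(out.getvalue()).hexdigest()

# Two runs over the same bytes yield the same digest:
assert deterministic_chash(b"abc") == deterministic_chash(b"abc")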