def unpack(self, target_dir, clean_dir=True):
    """Verify the source archive's sha1 checksum and extract it under target_dir.

    Raises SourceArchiveError when the checksum mismatches (unless the
    'ignore_verify' option is set) or when the archive type is unknown or
    its handler is not installed.
    """
    # Integrity first: compare the recorded sha1 with the file on disk.
    if not util.check_file_hash(self.archiveFile, self.archive.sha1sum):
        ctx.ui.warning(
            _("Archive File: {}\n * Expected sha1 value: {}\n * Received sha1 value: {}\n"
              .format(self.url.filename(),
                      self.archive.sha1sum,
                      util.sha1_file(self.archiveFile))))
        if ctx.get_option('ignore_verify'):
            # User explicitly opted to continue despite the mismatch.
            ctx.ui.warning(
                _("* Archive verification passed. Such problems may occur during the build process."))
        else:
            raise SourceArchiveError(_("unpack: check_file_hash failed."))

    # Build a handler for the archive format; translate handler errors
    # into SourceArchiveError so callers only deal with one exception type.
    try:
        handler = Archive(self.archiveFile, self.archive.type)
    except UnknownArchiveType:
        raise SourceArchiveError(
            _("Unknown archive type '{0}' is given for '{1}'.").format(
                self.archive.type, self.url.filename()))
    except ArchiveHandlerNotInstalled:
        raise SourceArchiveError(
            _("Inary needs \'{}\' to unpack this archive but it is not installed."
              ).format(self.archive.type))

    # Honor an optional per-archive sub-directory target.
    destination = os.path.join(target_dir, self.archive.target or "")
    handler.unpack(destination, clean_dir)
def remove_file(fileinfo, package_name, remove_permanent=False, store_old_paths=None):
    """Remove a single installed file belonging to package_name from the system.

    Args:
        fileinfo: file record (path, type, hash, permanent flag) from the package DB.
        package_name: name of the package the file belongs to.
        remove_permanent: also remove files flagged as permanent.
        store_old_paths: optional path of a log file that collects removed paths.

    Config files are preserved (saved to the history DB) when they were
    modified by the user, unless the "purge" option is set. Emptied parent
    directories are pruned afterwards.
    """
    if fileinfo.permanent and not remove_permanent:
        return

    fpath = util.join_path(ctx.config.dest_dir(), fileinfo.path)

    historydb = inary.db.historydb.HistoryDB()
    # we should check if the file belongs to another
    # package (this can legitimately occur while upgrading
    # two packages such that a file has moved from one package to
    # another as in #2911)
    pkg = ctx.filesdb.get_filename(fileinfo.path)
    if pkg and pkg != package_name:
        ctx.ui.warning(
            _('Not removing conflicted file : \"{}\"').format(fpath))
        return

    if fileinfo.type == ctx.const.conf:
        # config files are precious, leave them as they are
        # unless they are the same as provided by package.
        # remove symlinks as they are, cause if the hash of the
        # file it links has changed, it will be kept as is,
        # and when the package is reinstalled the symlink will
        # link to that changed file again.
        try:
            if os.path.islink(fpath) or util.sha1_file(fpath) == fileinfo.hash:
                os.unlink(fpath)
            else:
                # keep changed file in history
                historydb.save_config(package_name, fpath)

                # after saving to history db, remove the config file any
                # way
                if ctx.config.get_option("purge"):
                    os.unlink(fpath)
        except FileError:
            # best-effort: a missing/unreadable config file is not fatal here
            pass
    else:
        if os.path.isfile(fpath) or os.path.islink(fpath):
            os.unlink(fpath)
            if store_old_paths:
                # FIX: use a context manager so the log file handle is
                # always closed (the old code leaked the handle).
                with open(store_old_paths, "a") as old_paths_file:
                    old_paths_file.write("{}\n".format(fpath))
        elif os.path.isdir(fpath) and not os.listdir(fpath):
            os.rmdir(fpath)
        else:
            ctx.ui.warning(
                _('Installed file \"{}\" does not exist on system [Probably you manually deleted]'
                  ).format(fpath))
            return

    # remove emptied directories
    dpath = os.path.dirname(fpath)
    while dpath != '/' and os.path.exists(dpath) and not os.listdir(dpath):
        os.rmdir(dpath)
        dpath = os.path.dirname(dpath)
def calculate_download_sizes(order):
    """Compute the total and already-cached download sizes for the packages in order.

    Returns a (total_size, cached_size) tuple in bytes and notifies the UI
    with both figures.
    """
    total_size = 0
    cached_size = 0

    installdb = inary.db.installdb.InstallDB()
    packagedb = inary.db.packagedb.PackageDB()

    try:
        cached_packages_dir = ctx.config.cached_packages_dir()
    except OSError:
        # happens when cached_packages_dir tried to be created by an
        # unpriviledged user
        cached_packages_dir = None

    for name in order:
        pkg = packagedb.get_package(name)

        delta = None
        if installdb.has_package(pkg.name):
            installed_release = installdb.get_release(pkg.name)
            distro, distro_release = installdb.get_distro_release(pkg.name)
            # inary distro upgrade should not use delta support
            if distro == pkg.distribution and distro_release == pkg.distributionRelease:
                delta = pkg.get_delta(installed_release)

        # Download the delta instead of the full package when one applies.
        use_delta = delta and not ctx.config.values.general.ignore_delta
        source = delta if use_delta else pkg

        fn = os.path.basename(source.packageURI)
        pkg_hash = source.packageHash
        pkg_size = source.packageSize

        if cached_packages_dir:
            candidate = util.join_path(cached_packages_dir, fn)
            # check the file and sha1sum to be sure it _is_ the cached package
            if os.path.exists(candidate) and util.sha1_file(candidate) == pkg_hash:
                cached_size += pkg_size
            elif os.path.exists("{}.part".format(candidate)):
                # a partial download also counts toward the cached total
                cached_size += os.stat("{}.part".format(candidate)).st_size

        total_size += pkg_size

    ctx.ui.notify(
        ui.cached, logging=False, total=total_size, cached=cached_size)
    return total_size, cached_size
def add_package(params):
    """Index worker: build and return the metadata entry for one package file.

    Args:
        params: (path, deltas, repo_uri) tuple — the .inary file path, a
            mapping of package name to its delta package paths, and the
            repository base URI used to relativize package URIs.

    Returns the populated ``md.package`` metadata object. Note that a
    package with corrupt metadata is reported but its (partial) metadata
    object is still returned, matching the original behavior.
    """
    try:
        path, deltas, repo_uri = params

        ctx.ui.info("{:80.80}\r".format(
            _(' -> Adding package to index: \"{}\"').format(
                os.path.basename(path))), noln=True)

        package = inary.package.Package(path)
        md = package.get_metadata()
        md.package.packageSize = int(os.path.getsize(path))
        md.package.packageHash = util.sha1_file(path)
        if ctx.config.options and ctx.config.options.absolute_urls:
            md.package.packageURI = os.path.realpath(path)
        else:
            md.package.packageURI = util.removepathprefix(repo_uri, path)

        # check package semantics
        # FIX: call md.errors() once and reuse the result (the old code
        # called it twice).
        errs = md.errors()
        if errs:
            ctx.ui.info("")
            ctx.ui.error(
                _(' * Package \"{}\": metadata corrupt, skipping...').format(
                    md.package.name))
            # FIX: str(*errs) raised TypeError whenever more than one
            # error was reported; join them instead.
            ctx.ui.error("\n".join(str(err) for err in errs))
        else:
            # No need to carry these with index (#3965)
            md.package.files = None
            md.package.additionalFiles = None

            if md.package.name in deltas:
                name, version, release, distro_id, arch = \
                    util.split_package_filename(path)

                for delta_path in deltas[md.package.name]:
                    src_release, dst_release, delta_distro_id, delta_arch = \
                        util.split_delta_package_filename(delta_path)[1:]

                    # Add only delta to latest build of the package
                    if dst_release != md.package.release or \
                            (delta_distro_id, delta_arch) != (distro_id, arch):
                        continue

                    delta = metadata.Delta()
                    delta.packageURI = util.removepathprefix(
                        repo_uri, delta_path)
                    delta.packageSize = int(os.path.getsize(delta_path))
                    delta.packageHash = util.sha1_file(delta_path)
                    delta.releaseFrom = src_release

                    md.package.deltaPackages.append(delta)

        return md.package
    except KeyboardInterrupt:
        # Handle KeyboardInterrupt exception to prevent ugly backtrace of all
        # worker processes and propagate the exception to main process.
        #
        # Probably it's better to use just 'raise' here, but multiprocessing
        # module has some bugs about that: (python#8296, python#9205 and
        # python#9207 )
        #
        # For now, worker processes do not propagate exceptions other than
        # Exception (like KeyboardInterrupt), so we have to manually propagate
        # KeyboardInterrupt exception as an Exception.
        raise Exception
def from_name(name, ignore_dep=None):
    """Locate *name* in the active repositories and return an Install operation for it.

    Prefers a delta package when the package is already installed from the
    same distro/release (an upgrade) and deltas are not disabled. Verifies
    the sha1 of both cached and freshly downloaded package files against the
    repository metadata (bug 4113).

    Raises Error when the package is in no active repository or the
    downloaded file's hash does not match.
    """
    packagedb = inary.db.packagedb.PackageDB()
    installdb = inary.db.installdb.InstallDB()
    # download package and return an installer object
    # find package in repository
    repo = packagedb.which_repo(name)
    if repo:
        repodb = inary.db.repodb.RepoDB()
        ctx.ui.debug(
            _("Package \"{0}\" found in repository \"{1}\"").format(
                name, repo))

        repo = repodb.get_repo(repo)
        pkg = packagedb.get_package(name)
        delta = None

        # Package is installed. This is an upgrade. Check delta.
        if installdb.has_package(pkg.name):
            release = installdb.get_release(pkg.name)
            (distro, distro_release) = installdb.get_distro_release(pkg.name)
            # inary distro upgrade should not use delta support
            if distro == pkg.distribution and distro_release == pkg.distributionRelease:
                delta = pkg.get_delta(release)

        ignore_delta = ctx.config.values.general.ignore_delta

        # If delta exists than use the delta uri.
        if delta and not ignore_delta:
            pkg_uri = delta.packageURI
            pkg_hash = delta.packageHash
        else:
            pkg_uri = pkg.packageURI
            pkg_hash = pkg.packageHash

        uri = inary.uri.URI(pkg_uri)
        if uri.is_absolute_path():
            pkg_path = str(pkg_uri)
        else:
            # Relative URI: resolve against the repository index location.
            pkg_path = os.path.join(
                os.path.dirname(repo.indexuri.get_uri()),
                str(uri.path()))

        ctx.ui.info(_("Package URI: \"{}\"").format(pkg_path), verbose=True)

        # Bug 4113
        # Drop a stale cached copy whose hash no longer matches the index.
        cached_file = inary.package.Package.is_cached(pkg_path)
        if cached_file and util.sha1_file(cached_file) != pkg_hash:
            os.unlink(cached_file)
            cached_file = None

        # NOTE(review): constructing Install presumably fetches the package
        # when it is not cached — confirm against the Install implementation.
        install_op = Install(pkg_path, ignore_dep)

        # Bug 4113
        # Verify the freshly downloaded file as well.
        if not cached_file:
            downloaded_file = install_op.package.filepath
            if util.sha1_file(downloaded_file) != pkg_hash:
                raise Error(
                    _("Download Error: Package does not match the repository package."
                      ))

        return install_op
    else:
        raise Error(
            _("Package \"{}\" not found in any active repository.").format(
                name))