def __init__(self, uri, mode, transfer_dir="/tmp",
             sha1sum=False, compress=None, sign=None):
    """it is pointless to open a file without a URI and a mode"""

    self.transfer_dir = transfer_dir
    self.sha1sum = sha1sum
    self.compress = compress
    self.sign = sign

    uri = File.make_uri(uri)
    if mode == File.read or mode == File.write:
        self.mode = mode
    else:
        raise Error(_("File mode must be either File.read or File.write"))

    if uri.is_remote_file():
        if self.mode == File.read:
            localfile = File.download(
                uri, transfer_dir, sha1sum, compress, sign)
        else:
            raise Error(_("Remote write not implemented."))
    else:
        localfile = uri.get_uri()
        if self.mode == File.read:
            localfile = File.decompress(localfile, self.compress)

    if self.mode == File.read:
        access = 'r'
    else:
        access = 'w'
    self.__file__ = open(localfile, access)
    self.localfile = localfile
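# Usage sketch (illustrative, not part of the original module; the module
# path and index file are assumptions). File.read and File.write are the
# only valid modes; remote URIs are fetched into transfer_dir first:
#
#     from inary.file import File
#     f = File("/tmp/inary-index.xml", File.read)
#     ...  # use the opened file
#     f.close()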
def parse_package_name(package_name):
    """Separate package name and version string.

    example: tasma-1.0.3-5-p11-x86_64 -> (tasma, 1.0.3-5)
    """

    # Strip extension if exists
    if package_name.endswith(ctx.const.package_suffix):
        package_name = remove_suffix(ctx.const.package_suffix, package_name)

    try:
        name, version, release, distro_id, arch = package_name.rsplit("-", 4)

        # Arch field cannot start with a digit. If a digit is found,
        # the package might have an old format. Raise here to call
        # the legacy function.
        if not arch or arch[0] in digits:
            raise ValueError
    except ValueError:
        try:
            return parse_package_name_legacy(package_name)
        except BaseException:
            raise Error(_("Invalid package name: \"{}\"").format(package_name))

    return name, "{0}-{1}".format(version, release)
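# Example, grounded in the docstring above (suffix stripping assumes the
# standard ctx.const.package_suffix, e.g. ".inary"):
#
#     >>> parse_package_name("tasma-1.0.3-5-p11-x86_64")
#     ('tasma', '1.0.3-5')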
def parse_delta_package_name(package_name):
    """Separate delta package name and release info.

    example: tasma-5-7-p11-x86_64.delta.inary -> (tasma, 5, 7)
    """

    # Strip extension if exists
    if package_name.endswith(ctx.const.delta_package_suffix):
        package_name = remove_suffix(ctx.const.delta_package_suffix,
                                     package_name)

    try:
        name, source_release, target_release, distro_id, arch = \
            package_name.rsplit("-", 4)

        # Arch field cannot start with a digit. If a digit is found,
        # the package might have an old format. Raise here to call
        # the legacy function.
        if not arch or arch[0] in digits:
            raise ValueError
    except ValueError:
        try:
            return parse_delta_package_name_legacy(package_name)
        except BaseException:
            raise Error(
                _("Invalid delta package name: \"{}\"").format(package_name))

    return name, source_release, target_release
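# Example, grounded in the docstring above (assumes
# ctx.const.delta_package_suffix == ".delta.inary"):
#
#     >>> parse_delta_package_name("tasma-5-7-p11-x86_64.delta.inary")
#     ('tasma', '5', '7')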
def make_uri(uri):
    """Handle the URI argument."""
    if isinstance(uri, str):
        uri = inary.uri.URI(uri)
    elif not isinstance(uri, inary.uri.URI):
        raise Error(_("uri must be either a URI or a string."))
    return uri
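# Sketch: make_uri() normalizes its argument, so callers may pass either
# type (the URL below is hypothetical):
#
#     uri = File.make_uri("https://example.org/inary-index.xml")  # str -> URI
#     uri = File.make_uri(uri)  # URI instances pass through unchanged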
def check_operation(self):
    self.old_pkginfo = None
    pkg = self.pkginfo

    if self.installdb.has_package(pkg.name):  # is this a reinstallation?
        (iversion_s, irelease_s) = self.installdb.get_version(pkg.name)[:2]

        pkg_version = inary.version.make_version(pkg.version)
        iversion = inary.version.make_version(iversion_s)
        if pkg_version > iversion:
            if ctx.config.get_option('store_lib_info'):
                self.store_old_paths = os.path.join(
                    ctx.config.old_paths_cache_dir(), pkg.name)
                ctx.ui.info(_('Storing old paths info.'))
                with open(self.store_old_paths, "w") as old_paths:
                    old_paths.write("Version: {}\n".format(iversion_s))
        elif pkg_version < iversion:
            ctx.ui.warning(_('Downgrade to old version.'))

        pkg_release = int(pkg.release)
        irelease = int(irelease_s)

        # is this an upgrade?
        # determine and report the kind of upgrade: version, release
        if pkg_release > irelease:
            ctx.ui.info(_('Upgrading to new release.'))
            self.operation = UPGRADE

        # is this a downgrade? confirm this action.
        elif pkg_release < irelease:
            if not ctx.ui.confirm(_('Downgrade to old distribution release?')):
                raise Error(_('Package downgrade declined'))
            self.operation = DOWNGRADE

        else:
            if self.ask_reinstall:
                if not ctx.ui.confirm(
                        _('Re-install same version package?')):
                    raise Error(_('Package re-install declined'))
            self.operation = REINSTALL

        # schedule for reinstall
        self.old_files = self.installdb.get_files(pkg.name)
        self.old_pkginfo = self.installdb.get_info(pkg.name)
        self.old_path = self.installdb.pkg_dir(
            pkg.name, iversion_s, irelease_s)
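# Decision summary for check_operation(), derived from the code above:
# the version comparison only drives logging and old-path bookkeeping,
# while the release comparison selects the operation:
#
#     pkg_release > irelease   -> self.operation = UPGRADE
#     pkg_release < irelease   -> self.operation = DOWNGRADE (confirmed)
#     pkg_release == irelease  -> self.operation = REINSTALL (confirmed)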
def check_versioning(version, release):
    try:
        int(release)
        inary.version.make_version(version)
    except (ValueError, inary.version.InvalidVersionError):
        raise Error(
            _("{0}-{1} is not a valid INARY version format").format(
                version, release))
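# Sketch of valid vs. invalid input: the release must parse as an integer
# and the version must satisfy inary.version.make_version:
#
#     check_versioning("1.0.3", "5")     # passes silently
#     check_versioning("1.0.3", "beta")  # raises Error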
def input(self, msg):
    if self.libreadline:
        try:
            return self.libreadline.readline(
                msg.encode("UTF-8")).decode("UTF-8")
        except Exception:
            raise Error(_("Unable to read from standard input."))
    else:
        return input(msg)
def close(self, delete_transfer=False):  # TODO: look this parameter
    """This method must be called at the end of the operation."""
    self.__file__.close()
    if self.mode == File.write:
        compressed_files = []
        ctypes = self.compress or 0
        if ctypes & File.COMPRESSION_TYPE_XZ:
            compressed_file = self.localfile + ".xz"
            compressed_files.append(compressed_file)
            with lzma.LZMAFile(compressed_file, "w") as lzma_file:
                with open(self.localfile, "rb") as f:
                    lzma_file.write(f.read())

        if ctypes & File.COMPRESSION_TYPE_BZ2:
            import bz2
            compressed_file = self.localfile + ".bz2"
            compressed_files.append(compressed_file)
            # BZ2File.write() expects bytes, so open the source in
            # binary mode.
            with bz2.BZ2File(compressed_file, "w") as bz2_file:
                with open(self.localfile, "rb") as f:
                    bz2_file.write(f.read())

        if self.sha1sum:
            sha1 = inary.util.sha1_file(self.localfile)
            with open(self.localfile + '.sha1sum', 'w') as cs:
                cs.write(sha1)
            for compressed_file in compressed_files:
                sha1 = inary.util.sha1_file(compressed_file)
                with open(compressed_file + '.sha1sum', 'w') as cs:
                    cs.write(sha1)

        if self.sign == File.detached:
            if inary.util.run_batch(
                    'gpg --detach-sig ' + self.localfile)[0]:
                raise Error(
                    _("ERROR: 'gpg --detach-sig {}' failed.").format(
                        self.localfile))
            for compressed_file in compressed_files:
                if inary.util.run_batch(
                        'gpg --detach-sig ' + compressed_file)[0]:
                    raise Error(
                        _("ERROR: 'gpg --detach-sig {}' failed.").format(
                            compressed_file))
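# Write-mode sketch (illustrative; the path is hypothetical): closing a
# File opened with sha1sum=True and xz compression leaves side files next
# to the original, e.g. index.xml, index.xml.xz, index.xml.sha1sum and
# index.xml.xz.sha1sum:
#
#     f = File("/tmp/index.xml", File.write, sha1sum=True,
#              compress=File.COMPRESSION_TYPE_XZ)
#     ...  # write content through the opened file
#     f.close()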
def check_requirements(self):
    """Check system requirements."""
    # Check free space
    total_size, symbol = util.human_readable_size(util.free_space())
    if util.free_space() < self.installedSize:
        raise Error(_("Not enough free space on your disk."))
    ctx.ui.info(
        _("Free space in 'destination directory': {:.2f} {}").format(
            total_size, symbol),
        verbose=True)
def uncompress(patchFile, compressType="gz", targetDir=""):
    """Uncompress the file and return the new path."""
    formats = ("gz", "gzip", "bz2", "bzip2", "lzma", "xz")
    if compressType not in formats:
        raise Error(
            _("Compression type is not valid: '{}'").format(compressType))
    archive = inary.archive.Archive(patchFile, compressType)
    try:
        archive.unpack(targetDir)
    except Exception as msg:
        raise Error(
            _("Error while decompressing \"{0}\": {1}").format(patchFile, msg))

    # FIXME: Get file path from Archive instance
    filePath = join_path(targetDir, os.path.basename(patchFile))

    # Remove the suffix from the file name, since it is uncompressed now
    extensions = {"gzip": "gz", "bzip2": "bz2"}
    extension = extensions.get(compressType, compressType)

    return filePath.split(".{}".format(extension))[0]
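# Sketch, following the return-value logic above (paths are hypothetical):
#
#     uncompress("/tmp/fix-build.patch.gz", compressType="gz",
#                targetDir="/tmp/work")
#     # -> "/tmp/work/fix-build.patch"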
def check_relations(self):
    # check dependencies
    # if not ctx.config.get_option('ignore_dependency'):
    #     if not self.pkginfo.installable():
    #         raise Error(_("{} package cannot be installed unless the dependencies are satisfied").format(self.pkginfo.name))

    # If a package is explicitly declared to conflict with this package
    # and we also passed the check_conflicts tests in operations.py, then
    # a non-conflicting package is in "order" to be installed and has no
    # file conflict problem with this package.
    # PS: we need this because the "order" generating code does not
    # consider conflicts.
    def really_conflicts(pkg):
        if not self.pkginfo.conflicts:
            return True
        return pkg not in [x.package for x in self.pkginfo.conflicts]

    # check file conflicts
    file_conflicts = []
    for f in self.files.list:
        pkg, existing_file = ctx.filesdb.get_file(f.path)
        if pkg:
            dst = util.join_path(ctx.config.dest_dir(), f.path)
            if pkg != self.pkginfo.name and not os.path.isdir(
                    dst) and really_conflicts(pkg):
                file_conflicts.append((pkg, existing_file))
    if file_conflicts:
        file_conflicts_str = ""
        for (pkg, existing_file) in file_conflicts:
            file_conflicts_str += _(
                "\"/{0}\" from \"{1}\" package\n").format(existing_file, pkg)
        msg = _('File conflicts:\n\"{}\"').format(file_conflicts_str)
        ctx.ui.warning(msg)
        if not self.ignore_file_conflicts:
            if not ctx.ui.confirm(_('Do you want to overwrite it?')):
                raise Error(msg)
def do_patch(sourceDir, patchFile, level=0, name=None, reverse=False):
    """Apply given patch to the sourceDir."""
    cwd = os.getcwd()
    if os.path.exists(sourceDir):
        os.chdir(sourceDir)
    else:
        raise Error(
            _("ERROR: WorkDir ({}) does not exist\n").format(sourceDir))

    check_file(patchFile)

    if level is None:
        with open(patchFile) as patchfile:
            lines = patchfile.readlines()
            try:
                paths_m = [
                    line.strip().split()[1] for line in lines
                    if line.startswith("---") and "/" in line
                ]
                try:
                    paths_p = [
                        line.strip().split()[1] for line in lines
                        if line.startswith("+++")
                    ]
                except IndexError:
                    paths_p = []
            except IndexError:
                pass
            else:
                if not paths_p:
                    paths_p = paths_m[:]
                    try:
                        paths_m = [
                            line.strip().split()[1] for line in lines
                            if line.startswith("***") and "/" in line
                        ]
                    except IndexError:
                        pass

                for path_p, path_m in zip(paths_p, paths_m):
                    if "/dev/null" in path_m and not len(
                            paths_p) - 1 == paths_p.index(path_p):
                        continue
                    level = check_patch_level(sourceDir, path_p)
                    if level is None and len(paths_m) - 1 == paths_m.index(
                            path_m):
                        level = check_patch_level(sourceDir, path_m)
                    if level is not None:
                        ctx.ui.info(
                            _("Detected patch level={0} for {1}").format(
                                level, os.path.basename(patchFile)),
                            verbose=True)
                        break

    if level is None:
        level = 0

    if name is None:
        name = os.path.basename(patchFile)

    if ctx.get_option('use_quilt'):
        patchesDir = join_path(sourceDir, ctx.const.quilt_dir_suffix)
        # Make sure the sourceDir/patches directory exists; if not, create it.
        if not os.path.exists(patchesDir):
            os.makedirs(patchesDir)
        # Import the original patch into the quilt tree
        (ret, out, err) = run_batch('quilt import {0} -p {1} -P {2} "{3}"'.format(
            ("-R" if reverse else ""), level, name, patchFile))
        # Run quilt push to apply the patch to the tree
        (ret, out, err) = run_batch('quilt push')
    else:
        # Run GNU patch to apply the patch to the tree
        (ret, out, err) = run_batch(
            'patch --remove-empty-files --no-backup-if-mismatch {0} -p{1} -i "{2}"'
            .format(("-R" if reverse else ""), level, patchFile))

    if ret:
        if out is None and err is None:
            # stdout and stderr were redirected, so they are None
            raise Error(_("ERROR: patch (\"{}\") failed.").format(patchFile))
        else:
            raise Error(
                _("ERROR: patch (\"{0}\") failed: {1}").format(patchFile, out))

    os.chdir(cwd)
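# Usage sketch (paths are hypothetical). Passing level=None enables the
# heuristic above, which scans ---/+++/*** headers to detect the -p level:
#
#     do_patch("/var/inary/work/foo-1.0", "/var/inary/files/fix-build.patch",
#              level=None)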
def download(uri, transfer_dir="/tmp", sha1sum=False,
             compress=None, sign=None, copylocal=False, pkgname=''):

    assert isinstance(uri, inary.uri.URI)

    inary.util.ensure_dirs(transfer_dir)

    # Check file integrity before saving?
    check_integrity = sha1sum or sign

    origfile = inary.util.join_path(transfer_dir, uri.filename())

    if sha1sum:
        sha1filename = File.download(
            inary.uri.URI(uri.get_uri() + '.sha1sum'), transfer_dir)
        with open(sha1filename) as sha1f:
            newsha1 = sha1f.read().split("\n")[0]

    if uri.is_remote_file() or copylocal:
        tmpfile = check_integrity and uri.filename() + ctx.const.temporary_suffix
        localfile = inary.util.join_path(
            transfer_dir, tmpfile or uri.filename())

        # TODO: code to use old .sha1sum file, is this a necessary optimization?
        # oldsha1fn = localfile + '.sha1sum'
        # if os.exists(oldsha1fn):
        #     oldsha1 = file(oldsha1fn).readlines()[0]
        if sha1sum and os.path.exists(origfile):
            oldsha1 = inary.util.sha1_file(origfile)
            if newsha1 == oldsha1:
                # early terminate, we already got it ;)
                raise AlreadyHaveException(uri, origfile)

        if uri.is_remote_file():
            ctx.ui.info(
                _("Fetching {}").format(uri.get_uri()), verbose=True)
            inary.fetcher.fetch_url(
                uri, transfer_dir, ctx.ui.Progress, tmpfile, pkgname)
        else:
            # copy to transfer dir
            inary.fetcher.fetch_from_locale(
                uri.get_uri(), transfer_dir, destfile=localfile)
    else:
        localfile = uri.get_uri()

        # TODO: use a special function here?
        if localfile.startswith("file:///"):
            localfile = localfile[7:]

        if not os.path.exists(localfile):
            raise IOError(_("File \"{}\" not found.").format(localfile))

        if not os.access(localfile, os.W_OK):
            oldfn = localfile
            localfile = inary.util.join_path(
                transfer_dir, os.path.basename(localfile))
            shutil.copy(oldfn, localfile)

    def clean_temporary():
        temp_files = []
        if sha1sum:
            temp_files.append(sha1filename)
        if check_integrity:
            temp_files.append(localfile)
        for filename in temp_files:
            try:
                os.unlink(filename)
            except OSError:
                pass

    if sha1sum:
        if inary.util.sha1_file(localfile) != newsha1:
            clean_temporary()
            raise Error(
                _("File integrity of \"{0}\" compromised.\n"
                  " localfile: {1}\n"
                  " newsha1: {2}").format(
                    uri, inary.util.sha1_file(localfile), newsha1))

    if check_integrity:
        shutil.move(localfile, origfile)
        localfile = origfile

    localfile = File.decompress(localfile, compress)

    return localfile
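# Download sketch (illustrative; the mirror URL is hypothetical). With
# sha1sum=True the ".sha1sum" side file is fetched first, and an unchanged
# local copy raises AlreadyHaveException instead of refetching:
#
#     local = File.download(inary.uri.URI("https://example.org/a.inary"),
#                           transfer_dir="/tmp", sha1sum=True)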
def error(msg):
    if ctx.config.get_option('ignore_action_errors'):
        ctx.ui.error(msg)
    else:
        raise Error(msg)
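# Behavior sketch: with the 'ignore_action_errors' option set, error()
# degrades to a logged message instead of aborting:
#
#     error(_("Configuration failed."))  # raises Error by default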
def from_name(name, ignore_dep=None):
    packagedb = inary.db.packagedb.PackageDB()
    installdb = inary.db.installdb.InstallDB()

    # download package and return an installer object
    # find package in repository
    repo = packagedb.which_repo(name)
    if repo:
        repodb = inary.db.repodb.RepoDB()
        ctx.ui.debug(
            _("Package \"{0}\" found in repository \"{1}\"").format(
                name, repo))

        repo = repodb.get_repo(repo)
        pkg = packagedb.get_package(name)
        delta = None

        # Package is installed. This is an upgrade. Check delta.
        if installdb.has_package(pkg.name):
            release = installdb.get_release(pkg.name)
            (distro, distro_release) = installdb.get_distro_release(pkg.name)

            # an inary distro upgrade should not use delta support
            if distro == pkg.distribution and distro_release == pkg.distributionRelease:
                delta = pkg.get_delta(release)

        ignore_delta = ctx.config.values.general.ignore_delta

        # If a delta exists, then use the delta URI.
        if delta and not ignore_delta:
            pkg_uri = delta.packageURI
            pkg_hash = delta.packageHash
        else:
            pkg_uri = pkg.packageURI
            pkg_hash = pkg.packageHash

        uri = inary.uri.URI(pkg_uri)
        if uri.is_absolute_path():
            pkg_path = str(pkg_uri)
        else:
            pkg_path = os.path.join(
                os.path.dirname(repo.indexuri.get_uri()), str(uri.path()))

        ctx.ui.info(_("Package URI: \"{}\"").format(pkg_path), verbose=True)

        # Bug 4113
        cached_file = inary.package.Package.is_cached(pkg_path)
        if cached_file and util.sha1_file(cached_file) != pkg_hash:
            os.unlink(cached_file)
            cached_file = None

        install_op = Install(pkg_path, ignore_dep)

        # Bug 4113
        if not cached_file:
            downloaded_file = install_op.package.filepath
            if util.sha1_file(downloaded_file) != pkg_hash:
                raise Error(
                    _("Download Error: Package does not match the repository package."))

        return install_op
    else:
        raise Error(
            _("Package \"{}\" not found in any active repository.").format(
                name))
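# Usage sketch (the package name is an example): builds an Install
# operation from a repository package, preferring a delta package when one
# applies and verifying the download against the repository hash:
#
#     install_op = from_name("tasma")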