def action(self, mapline, data):
    """Convert the given SVR4 pkgmap entry into the corresponding IPS
    action, using "data" as the action payload where one is needed.

    Returns None for objects that cannot be delivered correctly
    (mode, owner, or group of "?") and for "i" (information) files
    other than the package copyright.
    """

    # Map SVR4 file classes onto IPS "preserve" attribute values.
    preserve_dict = {
        "renameold": "renameold",
        "renamenew": "renamenew",
        "preserve": "true",
        "svmpreserve": "true"
    }

    # If any one of the mode, owner, or group is "?", then we're
    # clearly not capable of delivering the object correctly, so
    # ignore it.
    #
    # Tuple membership is used instead of substring membership
    # ('in "fevdx"') so an empty or multi-character type string can
    # never match by accident.
    if mapline.type in ("f", "e", "v", "d", "x") and \
        (mapline.mode == "?" or mapline.owner == "?" or
        mapline.group == "?"):
        return None

    if mapline.type == "f":
        a = file.FileAction(data, mode=mapline.mode,
            owner=mapline.owner, group=mapline.group,
            path=mapline.pathname,
            timestamp=misc.time_to_timestamp(int(mapline.modtime)))
        if mapline.klass in preserve_dict:
            a.attrs["preserve"] = preserve_dict[mapline.klass]
        return a
    elif mapline.type in ("e", "v"):
        # For editable files, map klass onto IPS names; if the match
        # fails, make sure we at least preserve the file.
        preserve = preserve_dict.get(mapline.klass, "true")
        return file.FileAction(data, mode=mapline.mode,
            owner=mapline.owner, group=mapline.group,
            path=mapline.pathname, preserve=preserve,
            timestamp=misc.time_to_timestamp(int(mapline.modtime)))
    elif mapline.type in ("d", "x"):
        return directory.DirectoryAction(mode=mapline.mode,
            owner=mapline.owner, group=mapline.group,
            path=mapline.pathname)
    elif mapline.type == "s":
        return link.LinkAction(path=mapline.pathname,
            target=mapline.target)
    elif mapline.type == "l":
        return hardlink.HardLinkAction(path=mapline.pathname,
            target=mapline.target)
    elif mapline.type == "i" and mapline.pathname == "copyright":
        return license.LicenseAction(data,
            license="%s.copyright" % self.pkgname,
            path=mapline.pathname)
    elif mapline.type == "i":
        # Other information files are not delivered.
        return None
    else:
        return unknown.UnknownAction(path=mapline.pathname)
def action(self, mapline, data):
    """Return the IPS action corresponding to a SVR4 pkgmap entry,
    with "data" supplying the payload for file-bearing entries."""

    # SVR4 file class -> IPS "preserve" attribute value.
    klass_to_preserve = {
        "renameold": "renameold",
        "renamenew": "renamenew",
        "preserve": "true",
        "svmpreserve": "true"
    }

    ftype = mapline.type

    if ftype in "f":
        act = file.FileAction(data,
            mode=mapline.mode,
            owner=mapline.owner,
            group=mapline.group,
            path=mapline.pathname,
            timestamp=misc.time_to_timestamp(int(mapline.modtime)))
        if mapline.klass in klass_to_preserve:
            act.attrs["preserve"] = klass_to_preserve[mapline.klass]
        return act

    if ftype in "ev":
        # Editable files: map klass onto the IPS preserve names; if
        # there is no match, fall back to preserving the file.
        return file.FileAction(data,
            mode=mapline.mode,
            owner=mapline.owner,
            group=mapline.group,
            path=mapline.pathname,
            preserve=klass_to_preserve.get(mapline.klass, "true"),
            timestamp=misc.time_to_timestamp(int(mapline.modtime)))

    if ftype in "dx":
        return directory.DirectoryAction(
            mode=mapline.mode,
            owner=mapline.owner,
            group=mapline.group,
            path=mapline.pathname)

    if ftype == "s":
        return link.LinkAction(path=mapline.pathname,
            target=mapline.target)

    if ftype == "l":
        return hardlink.HardLinkAction(path=mapline.pathname,
            target=mapline.target)

    if ftype == "i" and mapline.pathname == "copyright":
        return license.LicenseAction(data,
            license="%s.copyright" % self.pkgname,
            path=mapline.pathname)

    # Anything else is treated as unknown.
    return unknown.UnknownAction(path=mapline.pathname)
def action(self, pkgmap, ci, path):
    """Build the IPS action for "path", looking up its pkgmap entry
    and drawing regular-file content from the archive entry "ci"."""

    try:
        mapline = pkgmap[path]
    except KeyError:
        # XXX Return an unknown instead of a missing, for now.
        return unknown.UnknownAction(path=path)

    ftype = mapline.type

    if ftype in "fev":
        return file.FileAction(ci.extractfile(),
            mode=mapline.mode, owner=mapline.owner,
            group=mapline.group, path=mapline.pathname,
            timestamp=misc.time_to_timestamp(int(mapline.modtime)))
    if ftype in "dx":
        return directory.DirectoryAction(mode=mapline.mode,
            owner=mapline.owner, group=mapline.group,
            path=mapline.pathname)
    if ftype == "s":
        return link.LinkAction(path=mapline.pathname,
            target=mapline.target)
    if ftype == "l":
        return hardlink.HardLinkAction(path=mapline.pathname,
            target=mapline.target)
    return unknown.UnknownAction(path=mapline.pathname)
def action(self, pkgmap, ci, path):
    """Convert the pkgmap entry for "path" into an IPS action, using
    the archive entry "ci" for file (and copyright) content.

    Returns None for undeliverable objects and for information files,
    recording unrecognized class-action scripts in self.scripts as a
    side effect.
    """
    try:
        mapline = pkgmap[path]
    except KeyError:
        # XXX Return an unknown instead of a missing, for now.
        return unknown.UnknownAction(path=path)

    act = None

    # If any one of the mode, owner, or group is "?", then we're
    # clearly not capable of delivering the object correctly, so
    # ignore it.
    if mapline.type in "fevdx" and (mapline.mode == "?" or
        mapline.owner == "?" or mapline.group == "?"):
        return None

    if mapline.type in "fev":
        act = file.FileAction(ci.extractfile(), mode=mapline.mode,
            owner=mapline.owner, group=mapline.group,
            path=mapline.pathname,
            timestamp=misc.time_to_timestamp(int(mapline.modtime)))
    elif mapline.type in "dx":
        act = directory.DirectoryAction(mode=mapline.mode,
            owner=mapline.owner, group=mapline.group,
            path=mapline.pathname)
    elif mapline.type == "s":
        act = link.LinkAction(path=mapline.pathname,
            target=mapline.target)
    elif mapline.type == "l":
        act = hardlink.HardLinkAction(path=mapline.pathname,
            target=mapline.target)
    elif mapline.type == "i" and mapline.pathname == "copyright":
        act = license.LicenseAction(ci.extractfile(),
            license="%s.copyright" % self.pkgname)
        act.hash = "install/copyright"
    elif mapline.type == "i":
        # Information files other than "depend" and "pkginfo" are
        # class-action or install scripts.
        if mapline.pathname not in ["depend", "pkginfo"]:
            # check to see if we've seen this script
            # before
            script = mapline.pathname
            if script.startswith("i.") and \
                script.replace("i.", "", 1) in \
                self.class_action_names:
                # Install script for a known file class; expected.
                pass
            elif script.startswith("r.") and \
                script.replace("r.", "", 1) in \
                self.class_action_names:
                # Removal script for a known file class; expected.
                pass
            else:
                # Unrecognized script -- record it (presumably so it
                # can be reported to the caller; confirm upstream).
                self.scripts.add(script)
        return None
    else:
        act = unknown.UnknownAction(path=mapline.pathname)

    # Tag every delivered action when this package is "hollow"
    # (self.hollow_attr names the attribute used for the tag).
    if self.hollow and act:
        act.attrs[self.hollow_attr] = "true"
    return act
def create_tracker():
    """Write the last-change timestamp to the tracker file "lcfile".

    Failures caused by lack of permission or a read-only filesystem
    are deliberately ignored; any other environment error is
    re-raised.
    """
    try:
        # Use a context manager so the file is closed even if the
        # write raises partway through (the original leaked the
        # descriptor on a failed write).
        # NOTE(review): mode "wb" with a str payload assumes
        # Python 2 semantics -- confirm before porting.
        with open(lcfile, "wb") as f:
            f.write("%s\n" % misc.time_to_timestamp(
                calendar.timegm(value.utctimetuple())))
    except EnvironmentError as e:
        # If the file can't be written due to
        # permissions or because the filesystem is
        # read-only, continue on.
        if e.errno not in (errno.EACCES, errno.EROFS):
            raise
def action(self, mapline, data):
    """Translate one SVR4 pkgmap line into an IPS action; "data" is
    the payload used for file-bearing entries."""

    # SVR4 class names that imply an IPS preserve setting.
    preserve_dict = {
        "renameold": "renameold",
        "renamenew": "renamenew",
        "preserve": "true",
        "svmpreserve": "true"
    }

    # Attributes common to the file and directory action forms.
    common = dict(mode=mapline.mode, owner=mapline.owner,
        group=mapline.group, path=mapline.pathname)

    if mapline.type in "f":
        fact = file.FileAction(data,
            timestamp=misc.time_to_timestamp(int(mapline.modtime)),
            **common)
        if mapline.klass in preserve_dict:
            fact.attrs["preserve"] = preserve_dict[mapline.klass]
        return fact
    elif mapline.type in "ev":
        # Editable files: map klass onto IPS names; when the lookup
        # fails, at least preserve the file.
        return file.FileAction(data,
            preserve=preserve_dict.get(mapline.klass, "true"),
            timestamp=misc.time_to_timestamp(int(mapline.modtime)),
            **common)
    elif mapline.type in "dx":
        return directory.DirectoryAction(**common)
    elif mapline.type == "s":
        return link.LinkAction(path=mapline.pathname,
            target=mapline.target)
    elif mapline.type == "l":
        return hardlink.HardLinkAction(path=mapline.pathname,
            target=mapline.target)
    elif mapline.type == "i" and mapline.pathname == "copyright":
        return license.LicenseAction(data,
            license="%s.copyright" % self.pkgname,
            path=mapline.pathname)
    else:
        return unknown.UnknownAction(path=mapline.pathname)
def action(self, tarfile, tarinfo):
    """Map one tar archive member onto the matching IPS action."""

    if tarinfo.isreg():
        # Regular file: carry permissions, ownership and mtime over.
        return file.FileAction(tarfile.extractfile(tarinfo),
            mode=oct(stat.S_IMODE(tarinfo.mode)),
            owner=tarinfo.uname,
            group=tarinfo.gname,
            path=tarinfo.name,
            timestamp=misc.time_to_timestamp(tarinfo.mtime))

    if tarinfo.isdir():
        return directory.DirectoryAction(
            mode=oct(stat.S_IMODE(tarinfo.mode)),
            owner=tarinfo.uname,
            group=tarinfo.gname,
            path=tarinfo.name)

    if tarinfo.issym():
        # Symbolic link.
        return link.LinkAction(path=tarinfo.name,
            target=tarinfo.linkname)

    if tarinfo.islnk():
        # Hard link.
        return hardlink.HardLinkAction(path=tarinfo.name,
            target=tarinfo.linkname)

    # Any other member type is treated as unknown.
    return unknown.UnknownAction(path=tarinfo.name)
def action(self, pkgmap, ci, path):
    """Return the IPS action for "path" using its pkgmap entry, with
    file content extracted from the datastream entry "ci".

    Returns None for undeliverable objects (mode, owner, or group of
    "?") and for "i" (information) files other than the copyright.
    """
    try:
        mapline = pkgmap[path]
    except KeyError:
        # XXX Return an unknown instead of a missing, for now.
        return unknown.UnknownAction(path=path)

    # If any one of the mode, owner, or group is "?", then we're
    # clearly not capable of delivering the object correctly, so
    # ignore it.
    #
    # Tuple membership is used instead of substring membership
    # ('in "fevdx"') so an empty or multi-character type string can
    # never match by accident.
    if mapline.type in ("f", "e", "v", "d", "x") and \
        (mapline.mode == "?" or mapline.owner == "?" or
        mapline.group == "?"):
        return None

    if mapline.type in ("f", "e", "v"):
        return file.FileAction(ci.extractfile(), mode=mapline.mode,
            owner=mapline.owner, group=mapline.group,
            path=mapline.pathname,
            timestamp=misc.time_to_timestamp(int(mapline.modtime)))
    elif mapline.type in ("d", "x"):
        return directory.DirectoryAction(mode=mapline.mode,
            owner=mapline.owner, group=mapline.group,
            path=mapline.pathname)
    elif mapline.type == "s":
        return link.LinkAction(path=mapline.pathname,
            target=mapline.target)
    elif mapline.type == "l":
        return hardlink.HardLinkAction(path=mapline.pathname,
            target=mapline.target)
    elif mapline.type == "i" and mapline.pathname == "copyright":
        return license.LicenseAction(data=ci.extractfile(),
            license="%s.copyright" % self.pkgname,
            path=mapline.pathname)
    elif mapline.type == "i":
        # Other information files are not delivered.
        return None
    else:
        return unknown.UnknownAction(path=mapline.pathname)
def action(self, pkgmap, ci, path):
    """Produce the IPS action describing "path"; content for regular
    files is drawn from the archive entry "ci"."""

    try:
        entry = pkgmap[path]
    except KeyError:
        # XXX Return an unknown instead of a missing, for now.
        return unknown.UnknownAction(path=path)

    if entry.type in "fev":
        return file.FileAction(ci.extractfile(),
            mode=entry.mode,
            owner=entry.owner,
            group=entry.group,
            path=entry.pathname,
            timestamp=misc.time_to_timestamp(int(entry.modtime)))
    elif entry.type in "dx":
        return directory.DirectoryAction(
            mode=entry.mode,
            owner=entry.owner,
            group=entry.group,
            path=entry.pathname)
    elif entry.type == "s":
        return link.LinkAction(path=entry.pathname,
            target=entry.target)
    elif entry.type == "l":
        return hardlink.HardLinkAction(path=entry.pathname,
            target=entry.target)
    else:
        return unknown.UnknownAction(path=entry.pathname)
path = pkg.actions.set_action_data(a.hash, a, basedirs=basedirs, bundles=bundles)[0] elif a.name in nopub_actions: error(_("invalid action for publication: %s") % action, cmd="publish") t.close(abandon=True) return 1 if a.name == "file": basename = os.path.basename(a.attrs["path"]) for pattern in timestamp_files: if fnmatch.fnmatch(basename, pattern): if not isinstance(path, basestring): # Target is from bundle; can't # apply timestamp now. continue ts = misc.time_to_timestamp( os.stat(path).st_mtime) a.attrs["timestamp"] = ts break try: t.add(a) except: t.close(abandon=True) raise pkg_state, pkg_fmri = t.close(abandon=False, add_to_catalog=add_to_catalog) for val in (pkg_state, pkg_fmri): if val is not None: msg(val) return 0
def __setattr__(self, name, value):
    """Intercept attribute writes so that "operation_*" attributes
    are forwarded to the operation on top of the internal stack, and
    start/end bookkeeping runs when an operation begins or finishes.
    """
    if name == "client_args":
        raise AttributeError("'history' object attribute '{0}' "
            "is read-only.".format(name))

    # Ordinary attributes are stored normally.
    if not name.startswith("operation_"):
        return object.__setattr__(self, name, value)

    ops = object.__getattribute__(self, "_History__operations")
    if name == "operation_name":
        # A new operation is starting; push it onto the stack.
        if not ops:
            ops = []
            object.__setattr__(self, "_History__operations", ops)
        ops.append({"pathname": None,
            "operation": _HistoryOperation()})
    elif not ops:
        raise AttributeError(
            "'history' object attribute '{0}' "
            "cannot be set before 'operation_name'.".format(name))

    # Forward the write to the top-of-stack operation, with the
    # "operation_" prefix stripped.
    op = ops[-1]["operation"]
    setattr(op, name[len("operation_"):], value)

    # Access to the class attributes is done through object instead
    # of just referencing self to avoid any of the special logic in
    # place interfering with logic here.
    if name == "operation_name":
        # Before a new operation starts, clear exception state
        # for the current one so that when this one ends, the
        # last operation's exception won't be recorded to this
        # one.  If the error hasn't been recorded by now, it
        # doesn't matter anyway, so should be safe to clear.
        # NOTE(review): sys.exc_clear() exists only on Python 2.
        sys.exc_clear()

        # Mark the operation as having started and record
        # other, relevant information.
        op.start_time = misc.time_to_timestamp(None)
        try:
            op.username = portable.get_username()
        except KeyError:
            # No passwd entry for the uid; record a placeholder.
            op.username = "******"
        op.userid = portable.get_userid()

        ca = None
        if sys.argv[0]:
            ca = [sys.argv[0]]
        else:
            # Fallback for clients that provide no value.
            ca = [self.client_name]

        ca.extend(sys.argv[1:])
        object.__setattr__(self, "client_args", ca)
        object.__setattr__(self, "client_version", pkg.VERSION)
    elif name == "operation_result":
        # Record when the operation ended.
        op.end_time = misc.time_to_timestamp(None)

        # Some operations shouldn't be saved -- they're merely
        # included in the stack for completeness or to support
        # client functionality.
        if op.name not in DISCARDED_OPERATIONS and \
            value != RESULT_NOTHING_TO_DO:
            # Write current history and last operation to a
            # file.
            self.__save()

        # Discard it now that it is no longer needed.
        del ops[-1]
def trans_include(repo_uri, fargs, transaction=None):
    """DEPRECATED

    Add the actions from one or more manifest files (or stdin) to an
    open transaction; returns an EXIT_* status code.
    """
    basedirs = []
    timestamp_files = []

    opts, pargs = getopt.getopt(fargs, "d:T:")
    for opt, arg in opts:
        if opt == "-d":
            basedirs.append(arg)
        elif opt == "-T":
            timestamp_files.append(arg)

    if transaction == None:
        # No transaction supplied; rejoin the one named in the
        # environment.
        try:
            trans_id = os.environ["PKG_TRANS_ID"]
        except KeyError:
            usage(_("No transaction ID specified in $PKG_TRANS_ID"),
                cmd="include")
        xport, pub = setup_transport_and_pubs(repo_uri)
        t = trans.Transaction(repo_uri, trans_id=trans_id,
            xport=xport, pub=pub)
    else:
        t = transaction

    if not pargs:
        filelist = [("<stdin>", sys.stdin)]
    else:
        try:
            filelist = [(f, open(f)) for f in pargs]
        except IOError as e:
            error(e, cmd="include")
            return EXIT_OOPS

    lines = []      # contents of each input file, joined later
    linecnts = []   # tuples of starting line number, ending line number
    linecounter = 0 # running total
    for filename, f in filelist:
        try:
            data = f.read()
        except IOError as e:
            error(e, cmd="include")
            return EXIT_OOPS
        lines.append(data)
        linecnt = len(data.splitlines())
        linecnts.append((linecounter, linecounter + linecnt))
        linecounter += linecnt

    m = pkg.manifest.Manifest()
    try:
        m.set_content(content="\n".join(lines))
    except apx.InvalidPackageErrors as err:
        e = err.errors[0]
        lineno = e.lineno
        # Map the concatenated line number back onto the input file
        # it came from.
        for i, tup in enumerate(linecnts):
            if lineno > tup[0] and lineno <= tup[1]:
                filename = filelist[i][0]
                lineno -= tup[0]
                break
        else:
            filename = "???"
            lineno = "???"
        # Fixed: the message previously read "File (unknown)" even
        # though the offending filename was computed above and passed
        # to format().
        error(_("File {filename} line {lineno}: {err}").format(
            filename=filename, lineno=lineno, err=e),
            cmd="include")
        return EXIT_OOPS

    invalid_action = False

    for a in m.gen_actions():
        # don't publish this action
        if a.name == "set" and a.attrs["name"] in ["pkg.fmri",
            "fmri"]:
            continue
        elif a.has_payload:
            path, bd = pkg.actions.set_action_data(a.hash, a,
                basedirs)
        if a.name == "file":
            # Apply the source file's mtime as the action timestamp
            # when the basename matches a -T pattern.
            basename = os.path.basename(a.attrs["path"])
            for pattern in timestamp_files:
                if fnmatch.fnmatch(basename, pattern):
                    ts = misc.time_to_timestamp(
                        os.stat(path).st_mtime)
                    a.attrs["timestamp"] = ts
                    break
        if a.name in nopub_actions:
            error(_("invalid action for publication: {0}").format(
                str(a)), cmd="include")
            invalid_action = True
        else:
            t.add(a)

    if invalid_action:
        return EXIT_PARTIAL
    else:
        return EXIT_OK
def trans_publish(repo_uri, fargs):
    """Publish packages in a single step using provided manifest data
    and sources.

    Returns an EXIT_* status code; the transaction is abandoned on
    any publication error.
    """

    # --no-index is now silently ignored as the publication process no
    # longer builds search indexes automatically.
    opts, pargs = getopt.getopt(fargs, "b:d:s:T:",
        ["fmri-in-manifest", "no-index", "no-catalog", "key=",
        "cert="])

    add_to_catalog = True
    basedirs = []
    bundles = []
    timestamp_files = []
    key = None
    cert = None
    for opt, arg in opts:
        if opt == "-b":
            bundles.append(arg)
        elif opt == "-d":
            basedirs.append(arg)
        elif opt == "-s":
            repo_uri = arg
            if repo_uri and not repo_uri.startswith("null:"):
                repo_uri = misc.parse_uri(repo_uri)
        elif opt == "-T":
            timestamp_files.append(arg)
        elif opt == "--no-catalog":
            add_to_catalog = False
        elif opt == "--key":
            key = arg
        elif opt == "--cert":
            cert = arg

    if not repo_uri:
        usage(_("A destination package repository must be provided "
            "using -s."), cmd="publish")

    if not pargs:
        filelist = [("<stdin>", sys.stdin)]
    else:
        try:
            filelist = [(f, open(f)) for f in pargs]
        except IOError as e:
            error(e, cmd="publish")
            return EXIT_OOPS

    lines = ""      # giant string of all input files concatenated together
    linecnts = []   # tuples of starting line number, ending line number
    linecounter = 0 # running total
    for filename, f in filelist:
        try:
            data = f.read()
        except IOError as e:
            error(e, cmd="publish")
            return EXIT_OOPS
        lines += data
        linecnt = len(data.splitlines())
        linecnts.append((linecounter, linecounter + linecnt))
        linecounter += linecnt
        f.close()

    m = pkg.manifest.Manifest()
    try:
        m.set_content(content=lines)
    except apx.InvalidPackageErrors as err:
        e = err.errors[0]
        lineno = e.lineno
        # Map the concatenated line number back onto the input file
        # it came from.
        for i, tup in enumerate(linecnts):
            if lineno > tup[0] and lineno <= tup[1]:
                filename = filelist[i][0]
                lineno -= tup[0]
                break
        else:
            filename = "???"
            lineno = "???"
        # Fixed: the message previously read "File (unknown)" even
        # though the offending filename was computed above and passed
        # to format().
        error(_("File {filename} line {lineno}: {err}").format(
            filename=filename, lineno=lineno, err=e),
            cmd="publish")
        return EXIT_OOPS

    try:
        pfmri = pkg.fmri.PkgFmri(m["pkg.fmri"])
        if not pfmri.version:
            # Cannot have a FMRI without version
            error(_("The pkg.fmri attribute '{0}' in the package "
                "manifest must include a version.").format(pfmri),
                cmd="publish")
            return EXIT_OOPS
        if not DebugValues["allow-timestamp"]:
            # If not debugging, timestamps are ignored.
            pfmri.version.timestr = None
        pkg_name = pfmri.get_fmri()
    except KeyError:
        error(_("Manifest does not set pkg.fmri"))
        return EXIT_OOPS

    xport, pub = setup_transport_and_pubs(repo_uri, ssl_key=key,
        ssl_cert=cert)
    t = trans.Transaction(repo_uri, pkg_name=pkg_name, xport=xport,
        pub=pub)
    t.open()

    target_files = []
    if bundles:
        # Ensure hardlinks marked as files in the manifest are
        # treated as files.  This necessary when sourcing files
        # from some bundle types.
        target_files.extend(a.attrs["path"] for a in m.gen_actions()
            if a.name == "file")

    bundles = [
        pkg.bundle.make_bundle(bundle, targetpaths=target_files)
        for bundle in bundles
    ]

    for a in m.gen_actions():
        # don't publish these actions
        if a.name == "signature":
            # Fixed: format() was previously applied inside _(),
            # so the formatted text (not the catalog msgid) was
            # looked up for translation.
            msg(_("WARNING: Omitting signature action "
                "'{0}'").format(a))
            continue
        if a.name == "set" and a.attrs["name"] in ["pkg.fmri",
            "fmri"]:
            continue
        elif a.has_payload:
            # Forcibly discard content-related attributes to prevent
            # errors when reusing manifests with different content.
            for attr in strip_attrs:
                a.attrs.pop(attr, None)
            path = pkg.actions.set_action_data(a.hash, a,
                basedirs=basedirs, bundles=bundles)[0]
        elif a.name in nopub_actions:
            # Fixed: "action" was an undefined name here, so hitting
            # this branch raised NameError instead of reporting the
            # offending action.
            error(_("invalid action for publication: {0}").format(
                str(a)), cmd="publish")
            t.close(abandon=True)
            return EXIT_OOPS
        if a.name == "file":
            # Apply the source file's mtime as the action timestamp
            # when the basename matches a -T pattern.
            basename = os.path.basename(a.attrs["path"])
            for pattern in timestamp_files:
                if fnmatch.fnmatch(basename, pattern):
                    if not isinstance(path, six.string_types):
                        # Target is from bundle; can't
                        # apply timestamp now.
                        continue
                    ts = misc.time_to_timestamp(
                        os.stat(path).st_mtime)
                    a.attrs["timestamp"] = ts
                    break
        try:
            t.add(a)
        except:
            # Abandon the transaction before propagating any failure
            # (including KeyboardInterrupt), hence the bare except.
            t.close(abandon=True)
            raise

    pkg_state, pkg_fmri = t.close(abandon=False,
        add_to_catalog=add_to_catalog)
    for val in (pkg_state, pkg_fmri):
        if val is not None:
            msg(val)
    return EXIT_OK
def verify(self, img, **args):
    """Returns a tuple of lists of the form (errors, warnings, info).
    The error list will be empty if the action has been correctly
    installed in the given image.

    In detail, this verifies that the file is present, and if the
    preserve attribute is not present, that the hashes and other
    attributes of the file match."""

    # Files marked preserve=abandon are never verified.
    if self.attrs.get("preserve") == "abandon":
        return [], [], []

    path = self.get_installed_path(img.get_root())

    lstat, errors, warnings, info, abort = \
        self.verify_fsobj_common(img, stat.S_IFREG)
    if lstat:
        if not stat.S_ISREG(lstat.st_mode):
            # Wrong object type on disk; must be replaced.
            self.replace_required = True

    if abort:
        assert errors
        self.replace_required = True
        return errors, warnings, info

    if path.lower().endswith("/bobcat") and args["verbose"] == True:
        # Returned as a purely informational (untranslated)
        # message so that no client should interpret it as a
        # reason to fail verification.
        info.append("Warning: package may contain bobcat! "
            "(http://xkcd.com/325/)")

    preserve = self.attrs.get("preserve")
    # Timestamp and size checks only apply to non-preserved files.
    if (preserve is None and "timestamp" in self.attrs and
        lstat.st_mtime !=
        misc.timestamp_to_time(self.attrs["timestamp"])):
        errors.append(
            _("Timestamp: {found} should be "
            "{expected}").format(found=misc.time_to_timestamp(
            lstat.st_mtime),
            expected=self.attrs["timestamp"]))

    # avoid checking pkg.size if we have any content-hashes present;
    # different size files may have the same content-hash
    pkg_size = int(self.attrs.get("pkg.size", 0))
    if preserve is None and pkg_size > 0 and \
        not set(digest.DEFAULT_GELF_HASH_ATTRS).intersection(
        set(self.attrs.keys())) and \
        lstat.st_size != pkg_size:
        errors.append(
            _("Size: {found:d} bytes should be "
            "{expected:d}").format(found=lstat.st_size,
            expected=pkg_size))

    # The remaining content checks are expensive, so bail out unless
    # they were explicitly requested.
    if (preserve is not None and args["verbose"] == False or
        lstat is None):
        return errors, warnings, info

    if args["forever"] != True:
        return errors, warnings, info

    #
    # Check file contents.
    #
    try:
        # This is a generic mechanism, but only used for libc on
        # x86, where the "best" version of libc is lofs-mounted
        # on the canonical path, foiling the standard verify
        # checks.
        is_mtpt = self.attrs.get("mountpoint", "").lower() == "true"
        elfhash = None
        elferror = None
        elf_hash_attr, elf_hash_val, \
            elf_hash_func = \
            digest.get_preferred_hash(self,
            hash_type=pkg.digest.HASH_GELF)
        if elf_hash_attr and haveelf and not is_mtpt:
            #
            # It's possible for the elf module to
            # throw while computing the hash,
            # especially if the file is badly
            # corrupted or truncated.
            #
            try:
                # On path, only calculate the
                # content hash that matches
                # the preferred one on the
                # action
                get_elfhash = \
                    elf_hash_attr == "elfhash"
                get_sha256 = (not get_elfhash and
                    elf_hash_func ==
                    digest.GELF_HASH_ALGS["gelf:sha256"])
                get_sha512t_256 = (
                    not get_elfhash and
                    elf_hash_func ==
                    digest.GELF_HASH_ALGS["gelf:sha512t_256"])
                elfhash = elf.get_hashes(
                    path, elfhash=get_elfhash,
                    sha256=get_sha256,
                    sha512t_256=get_sha512t_256)[elf_hash_attr]

                if get_elfhash:
                    elfhash = [elfhash]
                else:
                    elfhash = list(digest.ContentHash(
                        elfhash).values())
            except elf.ElfError as e:
                # Any ELF error means there is something bad
                # with the file, mark as needing to be replaced.
                elferror = _("ELF failure: {0}").format(e)

        if (elfhash is not None and
            elf_hash_val != elfhash[0]):
            elferror = _("ELF content hash: "
                "{found} "
                "should be {expected}").format(
                found=elfhash[0],
                expected=elf_hash_val)

        # Always check on the file hash because the ELF hash
        # check only checks on the ELF parts and does not
        # check for some other file integrity issues.
        if not is_mtpt:
            hash_attr, hash_val, hash_func = \
                digest.get_preferred_hash(self)
            sha_hash, data = misc.get_data_digest(path,
                hash_func=hash_func)
            if sha_hash != hash_val:
                # Prefer the ELF content hash error message.
                if preserve is not None:
                    # A changed editable file is informational,
                    # not an error.
                    info.append(_("editable file has "
                        "been changed"))
                elif elferror:
                    errors.append(elferror)
                    self.replace_required = True
                else:
                    errors.append(
                        _("Hash: "
                        "{found} should be "
                        "{expected}").format(found=sha_hash,
                        expected=hash_val))
                    self.replace_required = True

        # Check system attributes.
        # Since some attributes like 'archive' or 'av_modified'
        # are set automatically by the FS, it makes no sense to
        # check for 1:1 matches. So we only check that the
        # system attributes specified in the action are still
        # set on the file.
        sattr = self.attrs.get("sysattr", None)
        if sattr:
            if isinstance(sattr, list):
                sattr = ",".join(sattr)
            sattrs = sattr.split(",")
            if len(sattrs) == 1 and \
                sattrs[0] not in portable.get_sysattr_dict():
                # not a verbose attr, try as a compact
                set_attrs = portable.fgetattr(path,
                    compact=True)
                sattrs = sattrs[0]
            else:
                set_attrs = portable.fgetattr(path)

            for a in sattrs:
                if a not in set_attrs:
                    errors.append(
                        _("System attribute '{0}' "
                        "not set").format(a))

    except EnvironmentError as e:
        if e.errno == errno.EACCES:
            errors.append(_("Skipping: Permission Denied"))
        else:
            errors.append(_("Unexpected Error: {0}").format(e))
    except Exception as e:
        errors.append(_("Unexpected Exception: {0}").format(e))

    return errors, warnings, info
def install(self, pkgplan, orig):
    """Client-side method that installs a file.

    "pkgplan" supplies the image and the origin/destination fmris;
    "orig" is the previously-installed version of this action, if
    any.  Content is staged to a temporary file, verified against
    the action's preferred hash, and then renamed into place so the
    final path is never left partially written.
    """

    mode = None
    try:
        mode = int(self.attrs.get("mode", None), 8)
    except (TypeError, ValueError):
        # Mode isn't valid, so let validate raise a more
        # informative error.
        self.validate(fmri=pkgplan.destination_fmri)

    owner, group = self.get_fsobj_uid_gid(pkgplan,
        pkgplan.destination_fmri)

    final_path = self.get_installed_path(pkgplan.image.get_root())

    # Don't allow installation through symlinks.
    self.fsobj_checkpath(pkgplan, final_path)

    if not os.path.exists(os.path.dirname(final_path)):
        self.makedirs(os.path.dirname(final_path),
            mode=misc.PKG_DIR_MODE,
            fmri=pkgplan.destination_fmri)
    elif (not orig and not pkgplan.origin_fmri and
        "preserve" in self.attrs and
        self.attrs["preserve"] not in ("abandon",
        "install-only") and os.path.isfile(final_path)):
        # Unpackaged editable file is already present during
        # initial install; salvage it before continuing.
        pkgplan.salvage(final_path)

    # XXX If we're upgrading, do we need to preserve file perms from
    # existing file?

    # check if we have a save_file active; if so, simulate file
    # being already present rather than installed from scratch
    if "save_file" in self.attrs:
        orig = self.restore_file(pkgplan.image)

    # See if we need to preserve the file, and if so, set that up.
    #
    # XXX What happens when we transition from preserve to
    # non-preserve or vice versa? Do we want to treat a preserve
    # attribute as turning the action into a critical action?
    #
    # XXX We should save the originally installed file.  It can be
    # used as an ancestor for a three-way merge, for example.  Where
    # should it be stored?
    pres_type = self._check_preserve(orig, pkgplan)
    do_content = True
    old_path = None
    if pres_type == True or (pres_type and
        pkgplan.origin_fmri == pkgplan.destination_fmri):
        # File is marked to be preserved and exists so don't
        # reinstall content.
        do_content = False
    elif pres_type == "legacy":
        # Only rename old file if this is a transition to
        # preserve=legacy from something else.
        if orig.attrs.get("preserve", None) != "legacy":
            old_path = final_path + ".legacy"
    elif pres_type == "renameold.update":
        old_path = final_path + ".update"
    elif pres_type == "renameold":
        old_path = final_path + ".old"
    elif pres_type == "renamenew":
        # New content goes in alongside the existing file instead.
        final_path = final_path + ".new"
    elif pres_type == "abandon":
        return

    # If it is a directory (and not empty) then we should
    # salvage the contents.
    if os.path.exists(final_path) and \
        not os.path.islink(final_path) and \
        os.path.isdir(final_path):
        try:
            os.rmdir(final_path)
        except OSError as e:
            if e.errno == errno.ENOENT:
                pass
            elif e.errno in (errno.EEXIST, errno.ENOTEMPTY):
                pkgplan.salvage(final_path)
            elif e.errno != errno.EACCES:
                # this happens on Windows
                raise

    # XXX This needs to be modularized.
    if do_content and self.needsdata(orig, pkgplan):
        tfilefd, temp = tempfile.mkstemp(
            dir=os.path.dirname(final_path))
        if not self.data:
            # The state of the filesystem changed after the
            # plan was prepared; attempt a one-off
            # retrieval of the data.
            self.data = self.__set_data(pkgplan)
        stream = self.data()
        tfile = os.fdopen(tfilefd, "wb")
        try:
            # Always verify using the most preferred hash
            hash_attr, hash_val, hash_func = \
                digest.get_preferred_hash(self)
            shasum = misc.gunzip_from_stream(stream, tfile,
                hash_func)
        except zlib.error as e:
            raise ActionExecutionError(self,
                details=_("Error decompressing payload: "
                "{0}").format(" ".join([str(a) for a in
                e.args])), error=e)
        finally:
            tfile.close()
            stream.close()

        if shasum != hash_val:
            raise ActionExecutionError(self,
                details=_("Action data hash verification "
                "failure: expected: {expected} computed: "
                "{actual} action: {action}").format(
                expected=hash_val, actual=shasum,
                action=self))
    else:
        temp = final_path

    try:
        os.chmod(temp, mode)
    except OSError as e:
        # If the file didn't exist, assume that's intentional,
        # and drive on.
        if e.errno != errno.ENOENT:
            raise
        else:
            return

    try:
        portable.chown(temp, owner, group)
    except OSError as e:
        if e.errno != errno.EPERM:
            raise

    # XXX There's a window where final_path doesn't exist, but we
    # probably don't care.
    if do_content and old_path:
        # Move the existing file aside under the name chosen by the
        # preserve handling above.
        try:
            portable.rename(final_path, old_path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                # Only care if file isn't gone already.
                raise

    # This is safe even if temp == final_path.
    try:
        portable.rename(temp, final_path)
    except OSError as e:
        raise api_errors.FileInUseException(final_path)

    # Handle timestamp if specified (and content was installed).
    if do_content and "timestamp" in self.attrs:
        t = misc.timestamp_to_time(self.attrs["timestamp"])
        try:
            os.utime(final_path, (t, t))
        except OSError as e:
            if e.errno != errno.EACCES:
                raise

            # On Windows, the time cannot be changed on a
            # read-only file
            os.chmod(final_path, stat.S_IRUSR | stat.S_IWUSR)
            os.utime(final_path, (t, t))
            os.chmod(final_path, mode)

    # Handle system attributes.
    sattr = self.attrs.get("sysattr")
    if sattr:
        if isinstance(sattr, list):
            sattr = ",".join(sattr)
        sattrs = sattr.split(",")
        if len(sattrs) == 1 and \
            sattrs[0] not in portable.get_sysattr_dict():
            # not a verbose attr, try as a compact attr seq
            arg = sattrs[0]
        else:
            arg = sattrs

        try:
            portable.fsetattr(final_path, arg)
        except OSError as e:
            if e.errno != errno.EINVAL:
                raise
            warn = _("System attributes are not supported "
                "on the target image filesystem; 'sysattr'"
                " ignored for {0}").format(self.attrs["path"])
            pkgplan.image.imageplan.pd.add_item_message(
                pkgplan.destination_fmri,
                misc.time_to_timestamp(time.time()),
                MSG_WARNING, warn)
        except ValueError as e:
            warn = _("Could not set system attributes for {path}"
                "'{attrlist}': {err}").format(attrlist=sattr,
                err=e, path=self.attrs["path"])
            pkgplan.image.imageplan.pd.add_item_message(
                pkgplan.destination_fmri,
                misc.time_to_timestamp(time.time()),
                MSG_WARNING, warn)
def __setattr__(self, name, value):
    """Intercept attribute writes so that "operation_*" attributes
    are forwarded to the operation on top of the internal stack, and
    start/end bookkeeping runs when an operation begins or finishes.
    """
    if name == "client_args":
        raise AttributeError("'history' object attribute '%s' "
            "is read-only." % name)

    # Ordinary attributes are stored normally.
    if not name.startswith("operation_"):
        return object.__setattr__(self, name, value)

    ops = object.__getattribute__(self, "_History__operations")
    if name == "operation_name":
        # A new operation is starting; push it onto the stack.
        if not ops:
            ops = []
            object.__setattr__(self, "_History__operations", ops)
        ops.append({
            "pathname": None,
            "operation": _HistoryOperation()
        })
    elif not ops:
        raise AttributeError("'history' object attribute '%s' "
            "cannot be set before 'operation_name'." % name)

    # Forward the write to the top-of-stack operation, with the
    # "operation_" prefix stripped.
    op = ops[-1]["operation"]
    setattr(op, name[len("operation_"):], value)

    # Access to the class attributes is done through object instead
    # of just referencing self to avoid any of the special logic in
    # place interfering with logic here.
    if name == "operation_name":
        # Before a new operation starts, clear exception state
        # for the current one so that when this one ends, the
        # last operation's exception won't be recorded to this
        # one.  If the error hasn't been recorded by now, it
        # doesn't matter anyway, so should be safe to clear.
        # NOTE(review): sys.exc_clear() exists only on Python 2;
        # portable.get_username() may raise KeyError for a uid with
        # no passwd entry -- a sibling version guards this; confirm.
        sys.exc_clear()

        # Mark the operation as having started and record
        # other, relevant information.
        op.start_time = misc.time_to_timestamp(None)
        op.username = portable.get_username()
        op.userid = portable.get_userid()

        ca = None
        if sys.argv[0]:
            ca = [sys.argv[0]]
        else:
            # Fallback for clients that provide no value.
            ca = [self.client_name]

        ca.extend(sys.argv[1:])
        object.__setattr__(self, "client_args", ca)
        object.__setattr__(self, "client_version", pkg.VERSION)
    elif name == "operation_result":
        # Record when the operation ended.
        op.end_time = misc.time_to_timestamp(None)

        # Some operations shouldn't be saved -- they're merely
        # included in the stack for completeness or to support
        # client functionality.
        if op.name not in DISCARDED_OPERATIONS:
            # Write current history and last operation to a
            # file.
            self.__save()

        # Discard it now that it is no longer needed.
        del ops[-1]
def verify(self, img, **args):
    """Returns a tuple of lists of the form (errors, warnings,
    info).  The error list will be empty if the action has been
    correctly installed in the given image.

    In detail, this verifies that the file is present, and if
    the preserve attribute is not present, that the hashes
    and other attributes of the file match.

    args is expected to carry at least "verbose" and "forever"
    keys (both read below).
    """

    path = os.path.normpath(os.path.sep.join(
        (img.get_root(), self.attrs["path"])))

    # Common filesystem-object checks (existence, type, mode,
    # owner, group) are delegated to the shared helper.
    lstat, errors, warnings, info, abort = \
        self.verify_fsobj_common(img, stat.S_IFREG)
    if lstat:
        if not stat.S_ISREG(lstat.st_mode):
            self.replace_required = True

    if abort:
        assert errors
        self.replace_required = True
        return errors, warnings, info

    if path.lower().endswith("/bobcat") and args["verbose"] == True:
        # Returned as a purely informational (untranslated)
        # message so that no client should interpret it as a
        # reason to fail verification.
        info.append("Warning: package may contain bobcat! "
            "(http://xkcd.com/325/)")

    # Timestamp is only enforced for non-preserved files.
    if "preserve" not in self.attrs and \
        "timestamp" in self.attrs and lstat.st_mtime != \
        misc.timestamp_to_time(self.attrs["timestamp"]):
        errors.append(_("Timestamp: %(found)s should be "
            "%(expected)s") % {
            "found": misc.time_to_timestamp(lstat.st_mtime),
            "expected": self.attrs["timestamp"] })

    # avoid checking pkg.size if elfhash present;
    # different size files may have the same elfhash
    if "preserve" not in self.attrs and \
        "pkg.size" in self.attrs and \
        "elfhash" not in self.attrs and \
        lstat.st_size != int(self.attrs["pkg.size"]):
        errors.append(_("Size: %(found)d bytes should be "
            "%(expected)d") % {
            "found": lstat.st_size,
            "expected": int(self.attrs["pkg.size"]) })

    if "preserve" in self.attrs:
        # Preserved (editable) files only get content checks in
        # verbose mode with an existing file on disk.
        if args["verbose"] == False or lstat is None:
            return errors, warnings, info

    if args["forever"] != True:
        return errors, warnings, info

    #
    # Check file contents
    #
    try:
        # This is a generic mechanism, but only used for libc on
        # x86, where the "best" version of libc is lofs-mounted
        # on the canonical path, foiling the standard verify
        # checks.
        is_mtpt = self.attrs.get("mountpoint", "").lower() == "true"
        elfhash = None
        elferror = None
        if "elfhash" in self.attrs and haveelf and not is_mtpt:
            #
            # It's possible for the elf module to
            # throw while computing the hash,
            # especially if the file is badly
            # corrupted or truncated.
            #
            try:
                elfhash = elf.get_dynamic(path)["hash"]
            except RuntimeError, e:
                # NOTE(review): Python 2 except syntax.
                errors.append("Elfhash: %s" % e)

            if elfhash is not None and \
                elfhash != self.attrs["elfhash"]:
                elferror = _("Elfhash: %(found)s "
                    "should be %(expected)s") % {
                    "found": elfhash,
                    "expected": self.attrs["elfhash"] }

        # If we failed to compute the content hash, or the
        # content hash failed to verify, try the file hash.
        # If the content hash fails to match but the file hash
        # matches, it indicates that the content hash algorithm
        # changed, since obviously the file hash is a superset
        # of the content hash.
        if (elfhash is None or elferror) and not is_mtpt:
            hashvalue, data = misc.get_data_digest(path)
            if hashvalue != self.hash:
                # Prefer the content hash error message.
                if "preserve" in self.attrs:
                    info.append(_(
                        "editable file has"
                        " been changed"))
                elif elferror:
                    errors.append(elferror)
                else:
                    errors.append(_("Hash: "
                        "%(found)s should be "
                        "%(expected)s") % {
                        "found": hashvalue,
                        "expected": self.hash })
                self.replace_required = True
    # NOTE(review): this chunk is truncated here -- the matching
    # except clauses and final return for the try above are not
    # visible in this view.
def verify(self, img, **args):
    """Returns a tuple of lists of the form (errors, warnings,
    info).  The error list will be empty if the action has been
    correctly installed in the given image.

    In detail, this verifies that the file is present, and if
    the preserve attribute is not present, that the hashes
    and other attributes of the file match.

    args is expected to carry at least "verbose" and "forever"
    keys (both read below).
    """

    # Files marked preserve=abandon are never verified: the
    # packaging system has relinquished ownership of them.
    if self.attrs.get("preserve") == "abandon":
        return [], [], []

    path = self.get_installed_path(img.get_root())

    # Common filesystem-object checks (existence, type, mode,
    # owner, group) are delegated to the shared helper.
    lstat, errors, warnings, info, abort = \
        self.verify_fsobj_common(img, stat.S_IFREG)
    if lstat:
        if not stat.S_ISREG(lstat.st_mode):
            self.replace_required = True

    if abort:
        assert errors
        self.replace_required = True
        return errors, warnings, info

    if path.lower().endswith("/bobcat") and args["verbose"] == True:
        # Returned as a purely informational (untranslated)
        # message so that no client should interpret it as a
        # reason to fail verification.
        info.append("Warning: package may contain bobcat! "
            "(http://xkcd.com/325/)")

    # Timestamp is only enforced for non-preserved files.
    if "preserve" not in self.attrs and \
        "timestamp" in self.attrs and lstat.st_mtime != \
        misc.timestamp_to_time(self.attrs["timestamp"]):
        errors.append(_("Timestamp: {found} should be "
            "{expected}").format(
            found=misc.time_to_timestamp(lstat.st_mtime),
            expected=self.attrs["timestamp"]))

    # avoid checking pkg.size if we have any content-hashes present;
    # different size files may have the same content-hash
    if "preserve" not in self.attrs and \
        "pkg.size" in self.attrs and \
        not set(digest.RANKED_CONTENT_HASH_ATTRS).intersection(
        set(self.attrs.keys())) and \
        lstat.st_size != int(self.attrs["pkg.size"]):
        errors.append(_("Size: {found:d} bytes should be "
            "{expected:d}").format(found=lstat.st_size,
            expected=int(self.attrs["pkg.size"])))

    if "preserve" in self.attrs:
        # Preserved (editable) files only get content checks in
        # verbose mode with an existing file on disk.
        if args["verbose"] == False or lstat is None:
            return errors, warnings, info

    if args["forever"] != True:
        return errors, warnings, info

    #
    # Check file contents. At the moment, the only content-hash
    # supported in pkg(5) is for ELF files, so this will need work
    # when additional content-hashes are added.
    #
    try:
        # This is a generic mechanism, but only used for libc on
        # x86, where the "best" version of libc is lofs-mounted
        # on the canonical path, foiling the standard verify
        # checks.
        is_mtpt = self.attrs.get("mountpoint", "").lower() == "true"
        elfhash = None
        elferror = None
        ehash_attr, elfhash_val, hash_func = \
            digest.get_preferred_hash(self,
            hash_type=pkg.digest.CONTENT_HASH)
        if ehash_attr and haveelf and not is_mtpt:
            #
            # It's possible for the elf module to
            # throw while computing the hash,
            # especially if the file is badly
            # corrupted or truncated.
            #
            try:
                # Annoying that we have to hardcode this
                if ehash_attr == \
                    "pkg.content-hash.sha256":
                    get_sha256 = True
                    get_sha1 = False
                else:
                    get_sha256 = False
                    get_sha1 = True
                elfhash = elf.get_dynamic(path,
                    sha1=get_sha1,
                    sha256=get_sha256)[ehash_attr]
            except RuntimeError as e:
                errors.append(
                    "ELF content hash: {0}".format(e))

            if elfhash is not None and \
                elfhash != elfhash_val:
                elferror = _("ELF content hash: "
                    "{found} "
                    "should be {expected}").format(
                    found=elfhash,
                    expected=elfhash_val)

        # If we failed to compute the content hash, or the
        # content hash failed to verify, try the file hash.
        # If the content hash fails to match but the file hash
        # matches, it indicates that the content hash algorithm
        # changed, since obviously the file hash is a superset
        # of the content hash.
        if (elfhash is None or elferror) and not is_mtpt:
            hash_attr, hash_val, hash_func = \
                digest.get_preferred_hash(self)
            sha_hash, data = misc.get_data_digest(path,
                hash_func=hash_func)
            if sha_hash != hash_val:
                # Prefer the content hash error message.
                if "preserve" in self.attrs:
                    # Preserved files may be edited by the
                    # admin; a changed hash is informational
                    # only, not an error.
                    info.append(_(
                        "editable file has "
                        "been changed"))
                elif elferror:
                    errors.append(elferror)
                    self.replace_required = True
                else:
                    errors.append(_("Hash: "
                        "{found} should be "
                        "{expected}").format(
                        found=sha_hash,
                        expected=hash_val))
                    self.replace_required = True

        # Check system attributes.
        # Since some attributes like 'archive' or 'av_modified'
        # are set automatically by the FS, it makes no sense to
        # check for 1:1 matches. So we only check that the
        # system attributes specified in the action are still
        # set on the file.
        sattr = self.attrs.get("sysattr", None)
        if sattr:
            sattrs = sattr.split(",")
            if len(sattrs) == 1 and \
                sattrs[0] not in portable.get_sysattr_dict():
                # not a verbose attr, try as a compact
                set_attrs = portable.fgetattr(path,
                    compact=True)
                # A compact spec is a string of one-char
                # flags; iterating it below checks each one.
                sattrs = sattrs[0]
            else:
                set_attrs = portable.fgetattr(path)

            for a in sattrs:
                if a not in set_attrs:
                    errors.append(
                        _("System attribute '{0}' "
                        "not set").format(a))

    except EnvironmentError as e:
        if e.errno == errno.EACCES:
            errors.append(_("Skipping: Permission Denied"))
        else:
            errors.append(_("Unexpected Error: {0}").format(
                e))
    except Exception as e:
        errors.append(_("Unexpected Exception: {0}").format(e))

    return errors, warnings, info
def verify(self, img, **args):
    """Returns a tuple of lists of the form (errors, warnings,
    info).  The error list will be empty if the action has been
    correctly installed in the given image.

    In detail, this verifies that the file is present, and if
    the preserve attribute is not present, that the hashes
    and other attributes of the file match.

    args is expected to carry at least "verbose" and "forever"
    keys (both read below).
    """

    path = os.path.normpath(
        os.path.sep.join((img.get_root(), self.attrs["path"])))

    # Common filesystem-object checks (existence, type, mode,
    # owner, group) are delegated to the shared helper.
    lstat, errors, warnings, info, abort = \
        self.verify_fsobj_common(img, stat.S_IFREG)
    if lstat:
        if not stat.S_ISREG(lstat.st_mode):
            self.replace_required = True
    if abort:
        assert errors
        return errors, warnings, info

    if path.lower().endswith("/bobcat") and args["verbose"] == True:
        # Returned as a purely informational (untranslated)
        # message so that no client should interpret it as a
        # reason to fail verification.
        info.append("Warning: package may contain bobcat! "
            "(http://xkcd.com/325/)")

    # Timestamp is only enforced for non-preserved files.
    if "preserve" not in self.attrs and \
        "timestamp" in self.attrs and lstat.st_mtime != \
        misc.timestamp_to_time(self.attrs["timestamp"]):
        errors.append(
            _("Timestamp: %(found)s should be "
            "%(expected)s") % {
            "found": misc.time_to_timestamp(lstat.st_mtime),
            "expected": self.attrs["timestamp"] })

    # avoid checking pkg.size if elfhash present;
    # different size files may have the same elfhash
    if "preserve" not in self.attrs and \
        "pkg.size" in self.attrs and \
        "elfhash" not in self.attrs and \
        lstat.st_size != int(self.attrs["pkg.size"]):
        errors.append(
            _("Size: %(found)d bytes should be "
            "%(expected)d") % {
            "found": lstat.st_size,
            "expected": int(self.attrs["pkg.size"]) })

    # Preserved (editable) files skip all content checks here.
    if "preserve" in self.attrs:
        return errors, warnings, info

    if args["forever"] != True:
        return errors, warnings, info

    #
    # Check file contents
    #
    try:
        elfhash = None
        elferror = None
        if "elfhash" in self.attrs and haveelf:
            #
            # It's possible for the elf module to
            # throw while computing the hash,
            # especially if the file is badly
            # corrupted or truncated.
            #
            try:
                elfhash = elf.get_dynamic(path)["hash"]
            except RuntimeError, e:
                # NOTE(review): Python 2 except syntax.
                errors.append("Elfhash: %s" % e)

            if elfhash is not None and \
                elfhash != self.attrs["elfhash"]:
                elferror = _("Elfhash: %(found)s "
                    "should be %(expected)s") % {
                    "found": elfhash,
                    "expected": self.attrs["elfhash"] }

        # If we failed to compute the content hash, or the
        # content hash failed to verify, try the file hash.
        # If the content hash fails to match but the file hash
        # matches, it indicates that the content hash algorithm
        # changed, since obviously the file hash is a superset
        # of the content hash.
        if elfhash is None or elferror:
            hashvalue, data = misc.get_data_digest(path)
            if hashvalue != self.hash:
                # Prefer the content hash error message.
                if elferror:
                    errors.append(elferror)
                else:
                    errors.append(
                        _("Hash: "
                        "%(found)s should be "
                        "%(expected)s") % {
                        "found": hashvalue,
                        "expected": self.hash })
                self.replace_required = True
    # NOTE(review): this chunk is truncated here -- the matching
    # except clauses and final return for the try above are not
    # visible in this view.
def verify(self, img, **args):
    """ verify that file is present and if preserve attribute
    not present, that hashes match.  Returns a single list of
    (untranslated) error strings; empty when the file verifies.

    args is expected to carry at least "verbose" and "forever"
    keys (both read below).
    """
    path = os.path.normpath(
        os.path.sep.join((img.get_root(), self.attrs["path"])))

    # Common filesystem-object checks (existence, type, mode,
    # owner, group) are delegated to the shared helper.
    lstat, errors, abort = \
        self.verify_fsobj_common(img, stat.S_IFREG)
    if lstat:
        if not stat.S_ISREG(lstat.st_mode):
            self.replace_required = True
    if abort:
        assert errors
        return errors

    if path.lower().endswith("/cat") and args["verbose"] == True:
        errors.append("Warning: package may contain bobcat! "
            "(http://xkcd.com/325/)")

    if "timestamp" in self.attrs and lstat.st_mtime != \
        misc.timestamp_to_time(self.attrs["timestamp"]):
        errors.append("Timestamp: %s should be %s" %
            (misc.time_to_timestamp(
            lstat.st_mtime), self.attrs["timestamp"]))

    # avoid checking pkg.size if elfhash present;
    # different size files may have the same elfhash
    if "preserve" not in self.attrs and \
        "pkg.size" in self.attrs and \
        "elfhash" not in self.attrs and \
        lstat.st_size != int(self.attrs["pkg.size"]):
        errors.append("Size: %d bytes should be %d" % \
            (lstat.st_size, int(self.attrs["pkg.size"])))

    # Preserved (editable) files skip all content checks here.
    if "preserve" in self.attrs:
        return errors

    if args["forever"] != True:
        return errors

    #
    # Check file contents
    #
    try:
        elfhash = None
        elferror = None
        if "elfhash" in self.attrs and haveelf:
            #
            # It's possible for the elf module to
            # throw while computing the hash,
            # especially if the file is badly
            # corrupted or truncated.
            #
            try:
                elfhash = elf.get_dynamic(path)["hash"]
            except RuntimeError, e:
                # NOTE(review): Python 2 except syntax.
                errors.append("Elfhash: %s" % e)

            if elfhash is not None and \
                elfhash != self.attrs["elfhash"]:
                elferror = "Elfhash: %s should be %s" % \
                    (elfhash, self.attrs["elfhash"])

        # If we failed to compute the content hash, or the
        # content hash failed to verify, try the file hash.
        # If the content hash fails to match but the file hash
        # matches, it indicates that the content hash algorithm
        # changed, since obviously the file hash is a superset
        # of the content hash.
        if elfhash is None or elferror:
            hashvalue, data = misc.get_data_digest(path)
            if hashvalue != self.hash:
                # Prefer the content hash error message.
                if elferror:
                    errors.append(elferror)
                else:
                    errors.append("Hash: %s should be %s" % \
                        (hashvalue, self.hash))
                self.replace_required = True
    # NOTE(review): this chunk is truncated here -- the matching
    # except clauses and final return for the try above are not
    # visible in this view.
def action(self, mapline, data):
    """Translate a single SVr4 pkgmap entry (mapline) into the
    corresponding IPS action, or None when the entry cannot or
    should not be delivered.  data is the action payload (often a
    pathname below self.filename)."""
    klass_to_preserve = {
        "renameold": "renameold",
        "renamenew": "renamenew",
        "preserve": "true",
        "svmpreserve": "true"
    }

    def record_payload_hash(new_act):
        # When the payload is a pathname under our package file
        # area, record its relative portion as the action hash.
        if new_act.hash == "NOHASH" and \
            isinstance(data, basestring) and \
            data.startswith(self.filename):
            new_act.hash = data[len(self.filename) + 1:]

    ftype = mapline.type

    # An object whose mode, owner, or group is "?" cannot be
    # delivered correctly, so skip it entirely.
    if ftype in "fevdx" and \
        "?" in (mapline.mode, mapline.owner, mapline.group):
        return None

    act = None
    if ftype in "fev":
        act = file.FileAction(data, mode=mapline.mode,
            owner=mapline.owner, group=mapline.group,
            path=mapline.pathname,
            timestamp=misc.time_to_timestamp(
            int(mapline.modtime)))
        # Known preservation classes map to their IPS value;
        # editable and volatile files always get at least basic
        # preservation.
        preserve = klass_to_preserve.get(mapline.klass)
        if preserve or ftype in "ev":
            act.attrs["preserve"] = preserve or "true"
        record_payload_hash(act)
    elif ftype in "dx":
        act = directory.DirectoryAction(mode=mapline.mode,
            owner=mapline.owner, group=mapline.group,
            path=mapline.pathname)
    elif ftype == "s":
        act = link.LinkAction(path=mapline.pathname,
            target=mapline.target)
    elif ftype == "l":
        act = hardlink.HardLinkAction(path=mapline.pathname,
            target=mapline.target)
    elif ftype == "i" and mapline.pathname == "copyright":
        act = license.LicenseAction(data,
            license="%s.copyright" % self.pkgname)
        record_payload_hash(act)
    elif ftype == "i":
        script = mapline.pathname
        if script not in ["depend", "pkginfo"]:
            # Remember any class-action script we haven't
            # already seen.
            known = self.class_action_names
            handled = (
                (script.startswith("i.") and
                    script.replace("i.", "", 1) in known) or
                (script.startswith("r.") and
                    script.replace("r.", "", 1) in known))
            if not handled:
                self.scripts.add(script)
        return None
    else:
        act = unknown.UnknownAction(path=mapline.pathname)

    # Hollow packages tag every delivered action.
    if self.hollow and act:
        act.attrs["pkg.send.convert.sunw-pkg-hollow"] = "true"
    return act
def action(self, mapline, data):
    """Translate a single SVr4 pkgmap entry (mapline) into the
    corresponding IPS action, or None when the entry cannot or
    should not be delivered.  data is the action payload (often a
    pathname below self.filename)."""
    klass_to_preserve = {
        "renameold": "renameold",
        "renamenew": "renamenew",
        "preserve": "true",
        "svmpreserve": "true"
    }

    def record_payload_hash(new_act):
        # When the payload is a pathname under our package file
        # area, record its relative portion as the action hash.
        if new_act.hash == "NOHASH" and \
            isinstance(data, six.string_types) and \
            data.startswith(self.filename):
            new_act.hash = data[len(self.filename) + 1:]

    ftype = mapline.type

    # An object whose mode, owner, or group is "?" cannot be
    # delivered correctly, so skip it entirely.
    if ftype in "fevdx" and \
        "?" in (mapline.mode, mapline.owner, mapline.group):
        return None

    act = None
    if ftype in "fev":
        # false positive
        # file-builtin; pylint: disable=W1607
        act = file.FileAction(data, mode=mapline.mode,
            owner=mapline.owner, group=mapline.group,
            path=mapline.pathname,
            timestamp=misc.time_to_timestamp(
            int(mapline.modtime)))
        # Known preservation classes map to their IPS value;
        # editable and volatile files always get at least basic
        # preservation.
        preserve = klass_to_preserve.get(mapline.klass)
        if preserve or ftype in "ev":
            act.attrs["preserve"] = preserve or "true"
        record_payload_hash(act)
    elif ftype in "dx":
        act = directory.DirectoryAction(mode=mapline.mode,
            owner=mapline.owner, group=mapline.group,
            path=mapline.pathname)
    elif ftype == "s":
        act = link.LinkAction(path=mapline.pathname,
            target=mapline.target)
    elif ftype == "l":
        act = hardlink.HardLinkAction(path=mapline.pathname,
            target=mapline.target)
    elif ftype == "i" and mapline.pathname == "copyright":
        act = license.LicenseAction(data,
            license="{0}.copyright".format(
            self.pkgname))
        record_payload_hash(act)
    elif ftype == "i":
        script = mapline.pathname
        if script not in ["depend", "pkginfo"]:
            # Remember any class-action script we haven't
            # already seen.
            known = self.class_action_names
            handled = (
                (script.startswith("i.") and
                    script.replace("i.", "", 1) in known) or
                (script.startswith("r.") and
                    script.replace("r.", "", 1) in known))
            if not handled:
                self.scripts.add(script)
        return None
    else:
        act = unknown.UnknownAction(path=mapline.pathname)

    # Hollow packages tag every delivered action.
    if self.hollow and act:
        act.attrs["pkg.send.convert.sunw-pkg-hollow"] = "true"
    return act
def action(self, pkgmap, ci, path):
    """Return the IPS action for the pkgmap entry named by path,
    reading any file payload from the archive entry ci.  Returns
    None for entries that should be skipped, and an UnknownAction
    for paths absent from the pkgmap."""
    try:
        mapline = pkgmap[path]
    except KeyError:
        # XXX Return an unknown instead of a missing, for now.
        return unknown.UnknownAction(path=path)

    ftype = mapline.type

    # An object whose mode, owner, or group is "?" cannot be
    # delivered correctly, so skip it entirely.
    if ftype in "fevdx" and \
        "?" in (mapline.mode, mapline.owner, mapline.group):
        return None

    act = None
    if ftype in "fev":
        # false positive
        # file-builtin; pylint: disable=W1607
        act = file.FileAction(ci.extractfile(),
            mode=mapline.mode, owner=mapline.owner,
            group=mapline.group, path=mapline.pathname,
            timestamp=misc.time_to_timestamp(
            int(mapline.modtime)))
    elif ftype in "dx":
        act = directory.DirectoryAction(mode=mapline.mode,
            owner=mapline.owner, group=mapline.group,
            path=mapline.pathname)
    elif ftype == "s":
        act = link.LinkAction(path=mapline.pathname,
            target=mapline.target)
    elif ftype == "l":
        act = hardlink.HardLinkAction(path=mapline.pathname,
            target=mapline.target)
    elif ftype == "i" and mapline.pathname == "copyright":
        act = license.LicenseAction(ci.extractfile(),
            license="{0}.copyright".format(
            self.pkgname))
        act.hash = "install/copyright"
    elif ftype == "i":
        script = mapline.pathname
        if script not in ["depend", "pkginfo"]:
            # Remember any class-action script we haven't
            # already seen.
            known = self.class_action_names
            handled = (
                (script.startswith("i.") and
                    script.replace("i.", "", 1) in known) or
                (script.startswith("r.") and
                    script.replace("r.", "", 1) in known))
            if not handled:
                self.scripts.add(script)
        return None
    else:
        act = unknown.UnknownAction(path=mapline.pathname)

    # Hollow packages tag every delivered action.
    if self.hollow and act:
        act.attrs[self.hollow_attr] = "true"
    return act
def verify(self, img, **args):
    """ verify that file is present and if preserve attribute
    not present, that hashes match.  Returns a single list of
    (untranslated) error strings; empty when the file verifies.

    args is expected to carry at least "verbose" and "forever"
    keys (both read below).
    """
    path = os.path.normpath(os.path.sep.join(
        (img.get_root(), self.attrs["path"])))

    # Common filesystem-object checks (existence, type, mode,
    # owner, group) are delegated to the shared helper.
    lstat, errors, abort = \
        self.verify_fsobj_common(img, stat.S_IFREG)
    if lstat:
        if not stat.S_ISREG(lstat.st_mode):
            self.replace_required = True
    if abort:
        assert errors
        return errors

    if path.lower().endswith("/cat") and args["verbose"] == True:
        errors.append("Warning: package may contain bobcat! "
            "(http://xkcd.com/325/)")

    if "timestamp" in self.attrs and lstat.st_mtime != \
        misc.timestamp_to_time(self.attrs["timestamp"]):
        errors.append("Timestamp: %s should be %s" %
            (misc.time_to_timestamp(lstat.st_mtime),
            self.attrs["timestamp"]))

    # avoid checking pkg.size if elfhash present;
    # different size files may have the same elfhash
    if "preserve" not in self.attrs and \
        "pkg.size" in self.attrs and \
        "elfhash" not in self.attrs and \
        lstat.st_size != int(self.attrs["pkg.size"]):
        errors.append("Size: %d bytes should be %d" % \
            (lstat.st_size, int(self.attrs["pkg.size"])))

    # Preserved (editable) files skip all content checks here.
    if "preserve" in self.attrs:
        return errors

    if args["forever"] != True:
        return errors

    #
    # Check file contents
    #
    try:
        elfhash = None
        elferror = None
        if "elfhash" in self.attrs and haveelf:
            #
            # It's possible for the elf module to
            # throw while computing the hash,
            # especially if the file is badly
            # corrupted or truncated.
            #
            try:
                elfhash = elf.get_dynamic(path)["hash"]
            except RuntimeError, e:
                # NOTE(review): Python 2 except syntax.
                errors.append("Elfhash: %s" % e)

            if elfhash is not None and \
                elfhash != self.attrs["elfhash"]:
                elferror = "Elfhash: %s should be %s" % \
                    (elfhash, self.attrs["elfhash"])

        # If we failed to compute the content hash, or the
        # content hash failed to verify, try the file hash.
        # If the content hash fails to match but the file hash
        # matches, it indicates that the content hash algorithm
        # changed, since obviously the file hash is a superset
        # of the content hash.
        if elfhash is None or elferror:
            hashvalue, data = misc.get_data_digest(path)
            if hashvalue != self.hash:
                # Prefer the content hash error message.
                if elferror:
                    errors.append(elferror)
                else:
                    errors.append("Hash: %s should be %s" % \
                        (hashvalue, self.hash))
                self.replace_required = True
    # NOTE(review): this chunk is truncated here -- the matching
    # except clauses and final return for the try above are not
    # visible in this view.