Esempio n. 1
0
    def set_signature(self,
                      acts,
                      key_path=None,
                      chain_paths=misc.EmptyI,
                      chash_dir=None):
        """Sets the signature value for this action.

                The 'acts' parameter is the iterable of actions this action
                should sign.

                The 'key_path' parameter is the path to the file containing the
                private key which is used to sign the actions.  If it is None,
                only a hash of the actions is stored as the value.

                The 'chain_paths' parameter is an iterable of paths to
                certificates which are needed to form the chain of trust from
                the certificate associated with the key in 'key_path' to one of
                the CAs for the publisher of the actions.

                The 'chash_dir' parameter is the temporary directory to use
                while calculating the compressed hashes for chain certs."""

        # Turning this into a list makes debugging vastly more
        # tractable.
        acts = list(acts)

        # If key_path is None, then set value to be the hash
        # of the actions.
        if key_path is None:
            # If no private key is set, then no certificate should
            # have been given.
            assert self.data is None
            h = hashlib.new(self.hash_alg)
            h.update(
                misc.force_bytes(
                    self.actions_to_str(acts, generic.Action.sig_version)))
            self.attrs["value"] = h.hexdigest()
        else:
            # If a private key is used, then the certificate it's
            # paired with must be provided.
            assert self.data is not None
            self.__set_chain_certs_data(chain_paths, chash_dir)

            try:
                with open(key_path, "rb") as f:
                    priv_key = serialization.load_pem_private_key(
                        f.read(), password=None, backend=default_backend())
            except ValueError:
                raise apx.BadFileFormat(
                    _("{0} was expected to "
                      "be a RSA key but could not be read "
                      "correctly.").format(key_path))

            # Use the one-shot sign() API: the incremental
            # signer()/update()/finalize() interface was deprecated
            # and later removed from the cryptography library.  This
            # also mirrors the one-shot verify() call used when
            # checking signatures.
            hhash = self.__get_hash_by_name(self.hash_alg)
            message = misc.force_bytes(
                self.actions_to_str(acts, generic.Action.sig_version))
            self.attrs["value"] = misc.binary_to_hex(
                priv_key.sign(message, padding.PKCS1v15(), hhash()))
Esempio n. 2
0
def renameat(fromfd, old, tofd, new):
    """Invoke renameat(2)."""

    # Validate argument types up front; messages match the C-level
    # parameter names.
    for value, expected, message in (
            (fromfd, int, "fromfd must be int type"),
            (old, six.string_types, "old must be a string"),
            (tofd, int, "tofd must be int type"),
            (new, six.string_types, "new must be a string")):
        if not isinstance(value, expected):
            raise TypeError(message)

    status = lib.renameat(fromfd, force_bytes(old), tofd, force_bytes(new))
    if status != 0:
        raise OSError(ffi.errno, os.strerror(ffi.errno), old)
Esempio n. 3
0
def __render_template(depot, request, path, pub, http_depot=None):
        """Render the template at 'path' for this request and return
        the result as bytes."""
        template = tlookup.get_template(path)
        base = api.BaseInterface(request, depot, pub)
        g_vars = {"base": base, "pub": pub, "http_depot": http_depot}
        rendered = template.render(g_vars=g_vars)
        # Starting in CherryPy 3.2, cherrypy.response.body only allows
        # bytes.
        return misc.force_bytes(rendered)
Esempio n. 4
0
    def info(self, *tokens):
        """Use a DepotHTTP to return an info response."""

        depot_http = self.__build_depot_http()
        stripped = self.__strip_pub(tokens, depot_http.repo)
        response = depot_http.info_0(*stripped[3:])
        # In Python 3, a WSGI application must return bytes as its output
        return misc.force_bytes(response)
Esempio n. 5
0
    def test_3_replace(self):
        """Verify that insert will replace an existing file even though
                the hashval is the same."""

        # Verify that reverse layout migration works as expected.
        hash1 = "584b6ab7d7eb446938a02e57101c3a2fecbfb3cb"
        hash2 = "584b6ab7d7eb446938a02e57101c3a2fecbfb3cc"
        hash3 = "994b6ab7d7eb446938a02e57101c3a2fecbfb3cc"
        hash4 = "cc1f76cdad188714d1c3b92a4eebb4ec7d646166"

        l1 = layout.V1Layout()

        # Populate the managed location using the v0 layout.
        for fhash in (hash1, hash2, hash3, hash4):
            self.touch_old_file(fhash, data="old-{0}".format(fhash))

        # Migrate it to the v1 layout and verify that each
        # file contains the expected data.
        fm = file_manager.FileManager(self.base_dir, False)
        for fhash in fm.walk():
            loc = fm.lookup(fhash)
            self.assertEqual(loc, os.path.join(self.base_dir,
                                               l1.lookup(fhash)))

            # Use a context manager so the handle is closed even if the
            # assertion fails.
            with open(loc, "rb") as f:
                self.assertEqual(f.read(),
                                 misc.force_bytes("old-{0}".format(fhash)))

        # Now replace each file using the old hashnames and verify
        # that the each contains the expected data.
        for fhash in fm.walk():
            loc = os.path.join(self.base_dir, l1.lookup(fhash))
            self.assertTrue(os.path.exists(loc))

            npath = os.path.join(self.base_dir, "new-{0}".format(fhash))
            with open(npath, "wb") as nfile:
                nfile.write(misc.force_bytes("new-{0}".format(fhash)))
            fm.insert(fhash, npath)

            loc = fm.lookup(fhash)
            with open(loc, "rb") as f:
                self.assertEqual(f.read(),
                                 misc.force_bytes("new-{0}".format(fhash)))
Esempio n. 6
0
 def readinto(self, b):
     """Read up to len(b) bytes into the writable buffer *b* and
             return the numbers of bytes read."""
     # not-context-manager for py 2.7;
     # pylint: disable=E1129
     with memoryview(b) as target:
         chunk = self.read(len(target))
         count = len(chunk)
         target[:count] = force_bytes(chunk)
     return count
Esempio n. 7
0
        def __transform_v1_v0(v1_cat, v0_dest):
                """Write the v0-format 'attrs' and 'catalog' files into
                'v0_dest' from the v1 catalog object 'v1_cat'."""
                name = os.path.join(v0_dest, "attrs")
                # Context managers ensure the files are closed (and
                # flushed) even if a write fails part-way through.
                with open(name, "wb") as f:
                        f.write(misc.force_bytes("S "
                            "Last-Modified: {0}\n".format(
                             v1_cat.last_modified.isoformat())))
                        f.write(misc.force_bytes("S prefix: CRSV\n"))
                        f.write(misc.force_bytes(
                                "S npkgs: {0}\n".format(
                                    v1_cat.package_version_count)))

                name = os.path.join(v0_dest, "catalog")
                with open(name, "wb") as f:
                        # Now write each FMRI in the catalog in the v0 format:
                        # V pkg:/[email protected],5.11-0.86:20080426T173208Z
                        for pub, stem, ver in v1_cat.tuples():
                                f.write(misc.force_bytes(
                                        "V pkg:/{0}@{1}\n".format(stem, ver)))
Esempio n. 8
0
 def touch_old_file(self, s, data=None):
     """Create a file for hash 's' under the old (v0) layout and return
     its path.  The file's contents default to 's' itself."""
     if data is None:
         data = s
     p = os.path.join(self.base_dir, self.old_hash(s))
     if not os.path.exists(os.path.dirname(p)):
         os.makedirs(os.path.dirname(p))
     # Context manager guarantees the handle is closed even if the
     # write raises.
     with open(p, "wb") as fh:
         fh.write(misc.force_bytes(data))
     return p
Esempio n. 9
0
 def calc_hash(self, vals):
     """Calculate the hash value of the sorted members of vals."""
     # In order to interoperate with older clients, we must use sha-1
     # here.
     shasum = hashlib.sha1()
     for member in sorted(vals):
         # Unicode-objects must be encoded before hashing.
         shasum.update(force_bytes(member))
     return shasum.hexdigest()
Esempio n. 10
0
def unlinkat(dirfd, path, flag):
    """Invoke unlinkat(2)."""

    # Table-driven argument validation; messages match the C-level
    # parameter names.
    checks = (
        (dirfd, int, "dirfd must be int type"),
        (path, six.string_types, "path must be a string"),
        (flag, int, "flag must be int type"),
    )
    for value, expected, message in checks:
        if not isinstance(value, expected):
            raise TypeError(message)

    status = lib.unlinkat(dirfd, force_bytes(path), flag)
    if status < 0:
        raise OSError(ffi.errno, os.strerror(ffi.errno), path)
Esempio n. 11
0
def mkdirat(fd, path, mode):
    """Invoke mkdirat(2)."""

    # Validate argument types before crossing into C.
    validations = (
        (fd, int, "fd must be int type"),
        (path, six.string_types, "path must be a string"),
        (mode, int, "mode must be int type"),
    )
    for value, expected, message in validations:
        if not isinstance(value, expected):
            raise TypeError(message)

    if lib.mkdirat(fd, force_bytes(path), mode) != 0:
        raise OSError(ffi.errno, os.strerror(ffi.errno), path)
Esempio n. 12
0
def openat(fildes, path, oflag, mode):
    """Invoke openat(2)."""

    # Validate argument types before crossing into C.
    for value, expected, message in (
            (fildes, int, "fildes must be int type"),
            (path, six.string_types, "path must be a string"),
            (oflag, int, "oflag must be int type"),
            (mode, int, "mode must be int type")):
        if not isinstance(value, expected):
            raise TypeError(message)

    new_fd = lib.openat(fildes, force_bytes(path), oflag, mode)
    if new_fd < 0:
        raise OSError(ffi.errno, os.strerror(ffi.errno), path)
    return new_fd
Esempio n. 13
0
    def repo_index(self, *tokens, **params):
        """Generate a page showing the list of repositories served by
                this Apache instance.

                Returns the rendered 'repos.shtml' template as bytes."""

        self.setup(cherrypy.request)
        # In order to reuse the pkg.depotd shtml files, we need to use
        # the pkg.server.api, which means passing a DepotBUI object,
        # despite the fact that we're not serving content for any one
        # repository.  For the purposes of rendering this page, we'll
        # use the first object we come across.
        # NOTE(review): 'depot_buis' and 'repositories' appear to be
        # module-level registries populated elsewhere -- confirm.
        depot = depot_buis[list(depot_buis.keys())[0]]
        accept_lang = self.get_accept_lang(cherrypy.request, depot)
        cherrypy.request.path_info = "/{0}".format(accept_lang)
        tlookup = mako.lookup.TemplateLookup(directories=[depot.web_root])
        pub = None
        base = pkg.server.api.BaseInterface(cherrypy.request, depot, pub)

        # build a list of all repositories URIs and BUI links,
        # and a dictionary of publishers for each repository URI
        repo_list = []
        repo_pubs = {}
        for repo_prefix in repositories.keys():
            repo = repositories[repo_prefix]
            depot = depot_buis[repo_prefix]
            repo_url = "{0}/{1}".format(cherrypy.request.base, repo_prefix)
            bui_link = "{0}/{1}/index.shtml".format(repo_prefix, accept_lang)
            repo_list.append((repo_url, bui_link))
            # Each publisher gets a direct URL beneath its repository
            # prefix.
            repo_pubs[repo_url] = \
                [(pub, "{0}/{1}/{2}".format(
                cherrypy.request.base, repo_prefix,
                pub)) for pub in repo.publishers]
        repo_list.sort()
        template = tlookup.get_template("repos.shtml")
        # Starting in CherryPy 3.2, cherrypy.response.body only allows
        # bytes.
        return misc.force_bytes(
            template.render(
                g_vars={
                    "base": base,
                    "pub": None,
                    "http_depot": "true",
                    "lang": accept_lang,
                    "repo_list": repo_list,
                    "repo_pubs": repo_pubs
                }))
Esempio n. 14
0
    def check_readonly(self, fm, unmoved, p):
        """Verify that the read-only FileManager 'fm' can look up hash
        'unmoved' at path 'p', but refuses to insert or remove it."""
        self.assertTrue(os.path.isfile(p))
        self.assertEqual(fm.lookup(unmoved), p)
        handle = fm.lookup(unmoved, opener=True)
        try:
            self.assertEqual(handle.read(), misc.force_bytes(unmoved))
        finally:
            handle.close()
        self.assertTrue(os.path.isfile(p))

        # Both mutating operations must raise and must leave the file
        # untouched.
        for func, err_args, call_args in (
                (fm.insert, ["create", unmoved], (unmoved, p)),
                (fm.remove, ["remove", unmoved], (unmoved,))):
            self.check_exception(func,
                                 file_manager.NeedToModifyReadOnlyFileManager,
                                 err_args, *call_args)
            self.assertTrue(os.path.isfile(p))
Esempio n. 15
0
        def transform_uris(urls, prefix):
                """Return the proxied URI for each element of 'urls',
                with duplicates removed while preserving order."""
                res = []

                for u in urls:
                        m = copy.copy(u)
                        scheme = m.scheme
                        if scheme == "http":
                                res.append(m.uri)
                        elif scheme == "https":
                                # The system depot handles connecting to the
                                # proxied https repositories, so the client
                                # should communicate over http to prevent it
                                # from doing tunneling.
                                m.change_scheme("http")
                                res.append(m.uri)
                        elif scheme == "file":
                                # The system depot provides direct access to
                                # file repositories.  The token <sysrepo> will
                                # be replaced in the client with the url it uses
                                # to communicate with the system repository.
                                uri_hash = digest.DEFAULT_HASH_FUNC(
                                    force_bytes(m.uri.rstrip("/"))).hexdigest()
                                res.append("http://{0}/{1}/{2}".format(
                                    publisher.SYSREPO_PROXY, prefix, uri_hash))
                        else:
                                assert False, "{0} is an unknown scheme.".format(
                                    u.scheme)

                # Remove duplicates, since the system-repository can only
                # provide one path to a given origin. This can happen if the
                # image has eg. two origins/mirrors configured for a publisher,
                # with one using http and the other using https, but both using
                # the same netloc and path.
                # We want to preserve origin/mirror order, so simply casting
                # into a set is not appropriate.
                seen = set()
                res_unique = []
                for item in res:
                        if item in seen:
                                continue
                        seen.add(item)
                        res_unique.append(item)
                return res_unique
Esempio n. 16
0
    def readline(self, *args):
        """Read one entire line from the pipe.
                Can block waiting for input."""

        # Iterative form of the read loop: obtain an fd over the pipe
        # when we have none, drain lines from it, then fetch the next.
        while True:
            if self.__readfh is None:
                # receive a file descriptor from the pipe
                fd = self.recvfd()
                if fd == -1:
                    # no more descriptors: return an empty result of
                    # the appropriate type
                    return b"" if self.__http_enc else ""
                self.__readfh = os.fdopen(fd)
            # read from the fd that we received over the pipe
            data = self.__readfh.readline(*args)
            if data != "":
                if self.__http_enc:
                    # Python 3 http.client HTTPResponse._read_status
                    # requires a bytes input.
                    return force_bytes(data, "iso-8859-1")
                return data
            # the fd we received over the pipe is empty; drop it and
            # wait for the next one
            self.__readfh = None
Esempio n. 17
0
    def verify_sig(self,
                   acts,
                   pub,
                   trust_anchors,
                   use_crls,
                   required_names=None):
        """Try to verify this signature.  It can return True or
                None.  None means we didn't know how to verify this signature.
                If we do know how to verify the signature but it doesn't verify,
                then an exception is raised.

                The 'acts' parameter is the iterable of actions against which
                to verify the signature.

                The 'pub' parameter is the publisher that published the
                package this action signed.

                The 'trust_anchors' parameter contains the trust anchors to use
                when verifying the signature.

                The 'use_crls' parameter is forwarded to the publisher's
                certificate chain verification (presumably controlling
                certificate revocation checks -- confirm with verify_chain).

                The 'required_names' parameter is a set of strings that must
                be seen as a CN in the chain of trust for the certificate."""

        ver = int(self.attrs["version"])
        # If this signature is tagged with variants, if the version is
        # higher than one we know about, or it uses an unrecognized
        # hash algorithm, we can't handle it yet.
        if self.get_variant_template() or \
            ver > generic.Action.sig_version or not self.hash_alg:
            return None
        # Turning this into a list makes debugging vastly more
        # tractable.
        acts = list(acts)
        # If self.hash is None, then the signature is storing a hash
        # of the actions, not a signed value.
        if self.hash is None:
            assert self.sig_alg is None
            h = hashlib.new(self.hash_alg)
            h.update(misc.force_bytes(self.actions_to_str(acts, ver)))
            computed_hash = h.digest()
            # The attrs value is stored in hex so that it's easy
            # to read.
            if misc.hex_to_binary(self.attrs["value"]) != \
                computed_hash:
                raise apx.UnverifiedSignature(
                    self,
                    _("The signature value did not match the "
                      "expected value. action: {0}").format(self))
            return True
        # Verify a signature that's not just a hash.
        if self.sig_alg is None:
            return None
        # Get the certificate paired with the key which signed this
        # action.
        attr, hash_val, hash_func = \
            digest.get_least_preferred_hash(self)
        cert = pub.get_cert_by_hash(hash_val,
                                    verify_hash=True,
                                    hash_func=hash_func)
        # Make sure that the intermediate certificates that are needed
        # to validate this signature are present.
        self.retrieve_chain_certs(pub)
        try:
            # This import is placed here to break a circular
            # import seen when merge.py is used.
            from pkg.client.publisher import CODE_SIGNING_USE
            # Verify the certificate whose key created this
            # signature action.
            pub.verify_chain(cert,
                             trust_anchors,
                             0,
                             use_crls,
                             required_names=required_names,
                             usages=CODE_SIGNING_USE)
        except apx.SigningException as e:
            # Record which action failed so callers can report it.
            e.act = self
            raise
        # Check that the certificate verifies against this signature.
        pub_key = cert.public_key()
        hhash = self.__get_hash_by_name(self.hash_alg)
        signature = misc.hex_to_binary(self.attrs["value"])

        try:
            # One-shot RSA PKCS#1 v1.5 verification; raises
            # InvalidSignature on mismatch.
            pub_key.verify(signature,
                           misc.force_bytes(self.actions_to_str(acts, ver)),
                           padding.PKCS1v15(), hhash())
        except InvalidSignature:
            raise apx.UnverifiedSignature(
                self,
                _("The signature value did not match the expected "
                  "value."))

        return True
Esempio n. 18
0
def fsetattr(filename, attr):
    """Set system attributes for a file. The system attributes can either be
    passed as a list of verbose attribute names or a string that consists of
    a sequence of compact attribute options.

    Raises ValueError for invalid system attributes or OSError (with errno set)
    if any of the library calls fail.

    Input examples:
      verbose attributes example: ['hidden', 'archive', 'sensitive', ... ]

    compact attributes example: 'HAT'
    """

    from pkg.misc import force_bytes
    if not isinstance(filename, six.string_types):
        raise TypeError("filename must be string type")
    if not attr:
        raise TypeError("{0} is not a valid system attribute".format(attr))

    compact = False
    request = ffi.new("nvlist_t **")
    request[0] = ffi.gc(request[0], lib.nvlist_free)

    if lib.nvlist_alloc(request, lib.NV_UNIQUE_NAME, 0) != 0:
        raise OSError(ffi.errno, os.strerror(ffi.errno))

    # A single string indicates system attributes are passed in compact
    # form (e.g. AHi), verbose attributes are read as a list of strings.
    if isinstance(attr, six.string_types):
        compact = True

    for c in attr:
        c = force_bytes(c)
        if compact:
            sys_attr = lib.option_to_attr(c)
        else:
            sys_attr = lib.name_to_attr(c)

        kind = "compact" if compact else "verbose"
        if sys_attr == lib.F_ATTR_INVAL:
            raise ValueError("{0} is not a valid {1} system "
                             "attribute".format(attr, kind))
        if not is_supported(sys_attr):
            raise ValueError("{0} is not a supported {1} system "
                             "attribute".format(attr, kind))
        if lib.nvlist_add_boolean_value(request[0], lib.attr_to_name(sys_attr),
                                        1) != 0:
            raise OSError(ffi.errno, os.strerror(ffi.errno))

    # os.open() raises OSError itself on failure and never returns -1,
    # so no return-value check is needed here.
    fd = os.open(filename, os.O_RDONLY)
    try:
        if lib.fsetattr(fd, lib.XATTR_VIEW_READWRITE, request[0]):
            raise OSError(ffi.errno, os.strerror(ffi.errno), filename)
    finally:
        # Always release the descriptor, even when fsetattr fails.
        os.close(fd)
Esempio n. 19
0
    def test_1(self):
        """Verify base functionality works as expected.

                Exercises read-only and writable FileManagers: v0->v1
                layout migration, insert, remove, walk, and the
                read-only guard."""

        t = tempfile.gettempdir()
        no_dir = os.path.join(t, "not_exist")

        # Test that a read only FileManager won't modify the file
        # system.
        fm = file_manager.FileManager(self.base_dir, readonly=True)
        self.assertEqual(os.listdir(self.base_dir), [])

        unmoved = "4b7c923af3a047d4685a39ad7bc9b0382ccde671"

        p = self.touch_old_file(unmoved)
        self.check_readonly(fm, unmoved, p)

        self.assertEqual(set(fm.walk()), set([unmoved]))

        # Test a FileManager that can write to the file system.
        fm = file_manager.FileManager(self.base_dir, False)

        hash1 = "584b6ab7d7eb446938a02e57101c3a2fecbfb3cb"
        hash2 = "584b6ab7d7eb446938a02e57101c3a2fecbfb3cc"
        hash3 = "994b6ab7d7eb446938a02e57101c3a2fecbfb3cc"
        hash4 = "cc1f76cdad188714d1c3b92a4eebb4ec7d646166"

        l = layout.V1Layout()

        self.assertEqual(l.lookup(hash1),
                         "58/584b6ab7d7eb446938a02e57101c3a2fecbfb3cb")

        # Test that looking up a file stored under the old system gets
        # moved to the correct location, that the new location is
        # correctly returned, and that the old location's parent
        # directory no longer exists as only a single file existed
        # there.  Finally, remove it for the next test if successful.
        p1 = self.touch_old_file(hash1)
        self.assertTrue(os.path.isfile(p1))
        self.assertTrue(os.path.isdir(os.path.dirname(p1)))
        self.assertEqual(fm.lookup(hash1),
                         os.path.join(self.base_dir, l.lookup(hash1)))
        self.assertTrue(not os.path.exists(p1))
        self.assertTrue(not os.path.exists(os.path.dirname(p1)))
        fm.remove(hash1)

        # Test that looking up a file stored under the old system gets
        # moved to the correct location, that the new location is
        # correctly returned, and that the old location's parent
        # directory still exists as multiple files were stored there.
        # Finally, remove file stored in the old location for the next
        # few tests.
        p1 = self.touch_old_file(hash1)
        self.touch_old_file(hash2)
        self.assertTrue(os.path.isfile(p1))
        self.assertTrue(os.path.isdir(os.path.dirname(p1)))
        self.assertEqual(fm.lookup(hash1),
                         os.path.join(self.base_dir, l.lookup(hash1)))
        self.assertTrue(not os.path.exists(p1))
        self.assertTrue(os.path.exists(os.path.dirname(p1)))
        fm.remove(hash2)

        # Test that looking up a file stored under the old system gets
        # moved and that it returns a file handle with the correct
        # contents.
        p4 = self.touch_old_file(hash4)
        self.assertTrue(os.path.isfile(p4))
        self.assertTrue(os.path.isdir(os.path.dirname(p4)))
        fh = fm.lookup(hash4, opener=True)
        try:
            self.assertEqual(fh.read(), misc.force_bytes(hash4))
        finally:
            fh.close()
        self.assertTrue(not os.path.exists(p4))
        self.assertTrue(not os.path.exists(os.path.dirname(p4)))

        p3 = self.touch_old_file(hash3)
        self.assertTrue(os.path.isfile(p3))
        self.assertTrue(os.path.isdir(os.path.dirname(p3)))
        fm.insert(hash3, p3)

        self.assertTrue(not os.path.exists(p3))
        self.assertTrue(not os.path.exists(os.path.dirname(p3)))

        fh = fm.lookup(hash3, opener=True)
        try:
            self.assertEqual(fh.read(), misc.force_bytes(hash3))
        finally:
            fh.close()

        # Test that walk returns the expected values.
        self.assertEqual(set(fm.walk()), set([unmoved, hash1, hash4, hash3]))

        # Test that walking with a different set of layouts works as
        # expected.
        fm2 = file_manager.FileManager(self.base_dir,
                                       readonly=True,
                                       layouts=[layout.get_preferred_layout()])

        fs = set([hash1, hash4, hash3])
        try:
            for i in fm2.walk():
                fs.remove(i)
        except file_manager.UnrecognizedFilePaths as e:
            # NOTE(review): 'p' is the v0 path of 'unmoved' created at
            # the top of this test -- the only path fm2's single
            # (preferred) layout can't recognize.  Confirm.
            self.assertEqual(e.fps, [p[len(self.base_dir) + 1:]])
        self.assertEqual(fs, set())

        # Test removing a file works and removes the containing
        # directory and that remove removes all instances of a hash
        # from the file manager.
        hash3_loc = os.path.join(self.base_dir, l.lookup(hash3))
        v0_hash3_loc = self.touch_old_file(hash3)

        self.assertTrue(os.path.isfile(hash3_loc))
        self.assertTrue(os.path.isfile(v0_hash3_loc))
        fm.remove(hash3)
        self.assertEqual(fm.lookup(hash3), None)
        self.assertTrue(not os.path.exists(hash3_loc))
        self.assertTrue(not os.path.exists(os.path.dirname(hash3_loc)))
        self.assertTrue(not os.path.exists(v0_hash3_loc))
        self.assertTrue(not os.path.exists(os.path.dirname(v0_hash3_loc)))
        self.assertTrue(os.path.isfile(fm.lookup(hash1)))

        rh2_fd, raw_hash_2_loc = tempfile.mkstemp(dir=self.base_dir)
        rh2_fh = os.fdopen(rh2_fd, "w")
        rh2_fh.write(hash2)
        rh2_fh.close()

        fm.insert(hash2, raw_hash_2_loc)
        h2_loc = fm.lookup(hash2)
        self.assertTrue(os.path.isfile(fm.lookup(hash2)))
        # Test that the directory has two files in it as expected.
        self.assertEqual(set(os.listdir(os.path.dirname(fm.lookup(hash2)))),
                         set([hash1, hash2]))
        # Test removing one of the two files doesn't remove the other.
        fm.remove(hash1)
        self.assertTrue(os.path.isfile(h2_loc))
        self.assertEqual(fm.lookup(hash2), h2_loc)
        self.assertEqual(fm.lookup(hash1), None)
        # Test that removing the second file works and removes the
        # containing directory as well.
        fm.remove(hash2)
        self.assertTrue(not os.path.exists(h2_loc))
        self.assertTrue(not os.path.exists(os.path.dirname(h2_loc)))

        # Test that setting the read_only property works and that none
        # of the activities has effected the location where unmoved has
        # been stored.
        fm.set_read_only()
        self.check_readonly(fm, unmoved, p)
Esempio n. 20
0
def _uri_hash(uri):
    """Return the hexadecimal digest of *uri* computed with the
    repository's default hash function."""
    # Hash functions operate on bytes, so coerce the URI first.
    encoded = misc.force_bytes(uri)
    return digest.DEFAULT_HASH_FUNC(encoded).hexdigest()
Esempio n. 21
0
    def test_parse_write(self):
        """Verify that the p5i parsing and writing works as expected.

        Round-trips publisher and package-name data through p5i.write()
        and p5i.parse() via a file object, a plain file path, and a
        file:// URI, then verifies the error behaviour for missing and
        malformed p5i sources."""

        # Verify that p5i export and parse works as expected.
        pub = self.__get_bobcat_pub()

        # First, Ensure that PkgFmri and strings are supported properly.
        # Build a simple list of packages.
        fmri_foo = fmri.PkgFmri("pkg:/[email protected],5.11-0")
        pnames = {
            "bobcat": [fmri_foo],
            "": ["pkg:/[email protected],5.11-0", "baz"],
        }

        # Dump the p5i data.
        fobj = cStringIO()
        p5i.write(fobj, [pub], pkg_names=pnames)

        # Verify that the p5i data ends with a terminating newline.
        # In Python 3, StringIO doesn't support non-zero relative seek,
        # so compute the absolute offset of the last character instead.
        fobj.seek(0, os.SEEK_END)
        fobj.seek(fobj.tell() - 1)
        self.assertEqual(fobj.read(), "\n")

        # Verify that output matches expected output.
        fobj.seek(0)
        output = fobj.read()
        self.assertEqualJSON(self.p5i_bobcat, output)

        def validate_results(results):
            # Check that parse() returned the expected
            # (publisher, pkg_names) pairs in order.
            # First result should be 'bobcat' publisher and its
            # pkg_names.
            pub, pkg_names = results[0]

            self.assertEqual(pub.prefix, "bobcat")
            self.assertEqual(pub.alias, "cat")
            repo = pub.repository
            self.assertEqual(repo.name, "source")
            self.assertEqual(repo.description, "xkcd.net/325")
            self.assertEqual(repo.legal_uris[0],
                             "http://xkcd.com/license.html")
            self.assertEqual(repo.refresh_seconds, 43200)
            self.assertEqual(pkg_names, [str(fmri_foo)])

            # Last result should be no publisher and a list of
            # pkg_names.
            pub, pkg_names = results[1]
            self.assertEqual(pub, None)
            self.assertEqual(pkg_names, ["pkg:/[email protected],5.11-0", "baz"])

        # Verify that parse returns the expected object and information
        # when provided a fileobj.
        fobj.seek(0)
        validate_results(p5i.parse(fileobj=fobj))

        # Verify that parse returns the expected object and information
        # when provided a file path.
        fobj.seek(0)
        (fd1, path1) = tempfile.mkstemp(dir=self.test_root)
        # tempfile.mkstemp open the file in binary mode
        os.write(fd1, misc.force_bytes(fobj.read()))
        os.close(fd1)
        validate_results(p5i.parse(location=path1))

        # Verify that parse returns the expected object and information
        # when provided a file URI.
        location = os.path.abspath(path1)
        location = urlunparse(("file", "", pathname2url(location), "", "", ""))
        validate_results(p5i.parse(location=location))
        fobj.close()
        fobj = None

        # Verify that appropriate exceptions are raised for p5i
        # information that can't be retrieved (doesn't exist).
        nefpath = os.path.join(self.test_root, "non-existent")
        self.assertRaises(api_errors.RetrievalError,
                          p5i.parse,
                          location="file://{0}".format(nefpath))

        self.assertRaises(api_errors.RetrievalError,
                          p5i.parse,
                          location=nefpath)

        # Verify that appropriate exceptions are raised for invalid
        # p5i information.
        lcpath = os.path.join(self.test_root, "libc.so.1")
        location = os.path.abspath(lcpath)
        location = urlunparse(("file", "", pathname2url(location), "", "", ""))

        # First, test as a file:// URI.
        self.assertRaises(api_errors.InvalidP5IFile,
                          p5i.parse,
                          location=location)

        # Last, test as a pathname.  (Bug fix: the original passed the
        # file:// URI here too, so the plain-pathname case was never
        # actually exercised; pass the raw path instead, mirroring the
        # non-existent-file checks above.)
        self.assertRaises(api_errors.InvalidP5IFile,
                          p5i.parse,
                          location=lcpath)
Esempio n. 22
0
        def lock(self, blocking=True):
                """Lock the lockfile, to prevent access from other
                processes.  If blocking is False, this method will
                return an exception, instead of blocking, if the lock
                is held.  If the lockfile cannot be opened,
                this method may return an EnvironmentError.

                Locking happens at two levels: an in-process lock
                (self._lock) is acquired first to serialize threads,
                then an fcntl lock on the lock file serializes
                processes.  On success the open file object is kept in
                self._fileobj, which holds the OS-level lock until
                unlock."""

                #
                # The password locking in cfgfiles.py depends on the behavior
                # of this function, which imitates that of libc's lckpwdf(3C).
                # If this function is changed, it either needs to continue to be
                # compatible with lckpwdf, or changes to cfgfiles.py must be
                # made.
                #

                # First serialize against other threads in this process.
                rval = self._lock.acquire(blocking=int(blocking))
                # Lock acquisition failed.
                if not rval:
                        raise self._failure_exc()

                # Exclusive OS-level lock; in the non-blocking case ask
                # fcntl to fail immediately rather than wait.
                lock_type = fcntl.LOCK_EX
                if not blocking:
                        lock_type |= fcntl.LOCK_NB

                # Attempt an initial open of the lock file.
                lf = None

                # Caller should catch EACCES and EROFS.
                try:
                        # If the file is a symlink we catch an exception
                        # and do not update the file.  O_NOFOLLOW makes
                        # the open fail with ELOOP on a symlink.
                        fd = os.open(self._filepath,
                            os.O_RDWR|os.O_APPEND|os.O_CREAT|
                            os.O_NOFOLLOW)
                        lf = os.fdopen(fd, "ab+")
                except OSError as e:
                        # Must drop the thread lock on any failure path.
                        self._lock.release()
                        if e.errno == errno.ELOOP:
                                raise api_errors.UnexpectedLinkError(
                                    os.path.dirname(self._filepath),
                                    os.path.basename(self._filepath),
                                    e.errno)
                        raise e
                except:
                        self._lock.release()
                        raise
                # Attempt to lock the file.
                try:
                        fcntl.lockf(lf, lock_type)
                except IOError as e:
                        # EAGAIN/EACCES indicate the lock is contended;
                        # anything else is an unexpected failure.
                        if e.errno not in (errno.EAGAIN, errno.EACCES):
                                self._lock.release()
                                raise

                        # If the lock failed (because it is likely contended),
                        # then extract the information about the lock acquirer
                        # and raise an exception.
                        # NOTE(review): file is opened in binary mode, so
                        # lock_data is bytes — presumably _get_lockstr
                        # handles that; confirm.
                        lock_data = lf.read().strip()
                        self._lock.release()
                        if self._get_lockstr:
                                lock_dict = self._get_lockstr(lock_data)
                        else:
                                lock_dict = {}
                        raise self._failure_exc(**lock_dict)

                # Store information about the lock acquirer and write it.
                try:
                        # Replace any stale holder info with our own.
                        lf.truncate(0)
                        lock_str = None
                        if self._set_lockstr:
                                lock_str = self._set_lockstr()
                        if lock_str:
                                lf.write(misc.force_bytes(lock_str))
                        lf.flush()
                        # Keep the file object open: closing it would
                        # release the fcntl lock.
                        self._fileobj = lf
                except:
                        self._fileobj = None
                        lf.close()
                        self._lock.release()
                        raise