Example #1
    def make_openers(self, hashval, path):
        """Find each action associated with the hash value hashval.
                Create an opener that points to the file at path for the
                action's data method."""

        totalsz = 0
        nactions = 0

        filesz = os.stat(path).st_size

        for action in self._fhash[hashval]:
            action.data = self._make_opener(path)
            nactions += 1
            totalsz += misc.get_pkg_otw_size(action)

        # The progress tracker accounts for the sizes of all actions
        # even if we only have to perform one download to satisfy
        # multiple actions with the same hashval.  Since we know
        # the size of the file we downloaded, but not necessarily
        # the size of the action responsible for the download,
        # generate the total size and subtract the size that was
        # downloaded.  The downloaded size was already accounted for in
        # the engine's progress tracking.  Adjust the progress tracker
        # by the difference between what we have and the total we should
        # have received.
        nbytes = int(totalsz - filesz)
        self._progtrack.download_add_progress((nactions - 1), nbytes)
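
All of the examples on this page lean on misc.get_pkg_otw_size(action). For reference, here is a minimal sketch of what it computes, assuming (as the pkg5 sources do) that the over-the-wire size is the compressed payload size pkg.csize, falling back to the uncompressed pkg.size when no compressed size is recorded:

def get_pkg_otw_size(action):
    """Sketch of pkg.misc.get_pkg_otw_size: the number of bytes the
    action's payload takes over the wire when downloaded."""
    size = int(action.attrs.get("pkg.csize", 0))
    if size == 0:
        # No compressed size recorded; fall back to the raw payload size.
        size = int(action.attrs.get("pkg.size", 0))
    return size
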
Example #2
    def get_xferstats(self):
        if self.__xfersize != -1:
            return (self.__xferfiles, self.__xfersize)

        self.__xfersize = 0
        self.__xferfiles = 0
        for src, dest in itertools.chain(*self.actions):
            if dest and dest.needsdata(src):
                self.__xfersize += get_pkg_otw_size(dest)
                self.__xferfiles += 1

        return (self.__xferfiles, self.__xfersize)
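
get_xferstats memoizes its result: __xfersize starts at -1, and the first call computes and caches both totals so that repeated calls are free. A minimal sketch of the initialization this sentinel test relies on, assuming a constructor along these lines in pkgplan.py (abbreviated; everything beyond the attributes used above is illustrative):

class PkgPlan(object):
    def __init__(self):
        # -1 is the "not yet computed" sentinel tested by get_xferstats().
        self.__xfersize = -1
        self.__xferfiles = -1
        # Lists of (src, dest) action pairs, flattened by
        # itertools.chain(*self.actions) in get_xferstats().
        self.actions = []
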
Example #3
File: pkgplan.py  Project: thenovum/pkg5
    def get_xferstats(self):
        if self.__xfersize != -1:
            return (self.__xferfiles, self.__xfersize)

        self.__xfersize = 0
        self.__xferfiles = 0
        for src, dest in itertools.chain(*self.actions):
            if dest and dest.needsdata(src, self):
                self.__xfersize += get_pkg_otw_size(dest)
                self.__xferfiles += 1
                if dest.name == "signature":
                    self.__xfersize += \
                        dest.get_action_chain_csize()
                    self.__xferfiles += \
                        len(dest.attrs.get("chain",
                            "").split())

        return (self.__xferfiles, self.__xfersize)
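
The signature branch accounts for the certificate chain that has to travel with the signature: one extra file per hash listed in the action's chain attribute, plus their combined compressed size via get_action_chain_csize(). A quick illustration with invented hash values:

attrs = {"chain": "aaa111 bbb222"}          # hypothetical chain attribute
assert len(attrs.get("chain", "").split()) == 2
# Two extra files are counted on top of the signature payload itself.
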
Example #4
File: pull.py  Project: aszeszo/test
def get_sizes(mfst):
        """Takes a manifest and return
        (get_bytes, get_files, send_bytes, send_comp_bytes) tuple."""

        getb = 0
        getf = 0
        sendb = 0
        sendcb = 0

        for a in mfst.gen_actions():
                if a.has_payload:
                        getb += get_pkg_otw_size(a)
                        getf += 1
                        sendb += int(a.attrs.get("pkg.size", 0))
                        sendcb += int(a.attrs.get("pkg.csize", 0))
                        if a.name == "signature":
                                getf += len(a.get_chain_certs())
                                getb += a.get_action_chain_csize()
        return getb, getf, sendb, sendcb
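
A usage sketch, assuming mfst is an already-loaded manifest (the names on the left are illustrative):

getb, getf, sendb, sendcb = get_sizes(mfst)
print "download: %d files, %d bytes" % (getf, getb)
print "publish:  %d bytes (%d compressed)" % (sendb, sendcb)
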
Example #5
def get_hashes_and_sizes(m):
        """Returns a dict of hashes and transfer sizes of actions with content
        in a manifest."""

        seen_hashes = set()
        def repeated(a):
                if a in seen_hashes:
                        return True
                seen_hashes.add(a)
                return False

        cshashes = {}
        for atype in ("file", "license"):
                for a in m.gen_actions_by_type(atype):
                        if hasattr(a, "hash") and not repeated(a.hash):
                                sz = int(a.attrs.get("pkg.size", 0))
                                csize = int(a.attrs.get("pkg.csize", 0))
                                otw_sz = get_pkg_otw_size(a)
                                cshashes[a.hash] = (sz, csize, otw_sz)
        return cshashes
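
Each key is a payload hash and each value a (size, csize, otw_size) triple, with repeated hashes collapsed by the repeated() closure so shared content is counted only once. A sketch of consuming the result, again assuming an already-loaded manifest m:

cshashes = get_hashes_and_sizes(m)
total_otw = sum(otw for sz, csize, otw in cshashes.values())
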
Example #6
    def add_action(self, action):
        """The multiple file retrieval operation is asynchronous.
                Add files to retrieve with this function.  Supply the
                publisher in pub and the list of files in filelist.
                Wait for the operation by calling waitFiles."""

        cachedpath = self._transport._action_cached(action)
        if cachedpath:
            action.data = self._make_opener(cachedpath)
            filesz = int(misc.get_pkg_otw_size(action))
            self._progtrack.download_add_progress(1, filesz)
            return

        hashval = action.hash

        # Each fhash key accesses a list of one or more actions.  If we
        # already have a key in the dictionary, get the list and append
        # the action to it.  Otherwise, create a new list with the first
        # action.
        if hashval in self._fhash:
            self._fhash[hashval].append(action)
        else:
            self._fhash[hashval] = [action]
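
The closing if/else is the usual dict-of-lists bucketing. The same grouping can be written with collections.defaultdict; a standalone sketch, not the pkg5 code itself:

import collections

fhash = collections.defaultdict(list)
for action in actions:    # actions: any iterable of payload-bearing actions
    # Each payload hash maps to the list of actions sharing that content,
    # so a single download can satisfy all of them.
    fhash[action.hash].append(action)
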
Example #7
File: pkgmerge.py  Project: aszeszo/test
def republish_packages(pub, target_pub, processdict, source_list, variant_list,
        variants, tracker, xport, dest_repo, dest_xport, pkg_tmpdir,
        dry_run=False):
        """Republish packages for publisher pub to dest_repo.

        If we try to republish a package that we have already published,
        an exception is raised.

        pub             the publisher from source_list that we are republishing
        target_pub      the destination publisher
        processdict     a dict indexed by package name of the pkgs to merge
        source_list     a list of source repositories
        variant_list    a list of dicts containing variant names/values
        variants        the unique set of variants across all sources.
        tracker         a progress tracker
        xport           the transport handling our source repositories
        dest_repo       our destination repository
        dest_xport      the transport handling our destination repository
        pkg_tmpdir      a temporary dir used when downloading pkg content
                        which may be deleted and recreated by this method.

        dry_run         True if we should not actually publish
        """

        def get_basename(pfmri):
                open_time = pfmri.get_timestamp()
                return "%d_%s" % \
                    (calendar.timegm(open_time.utctimetuple()),
                    urllib.quote(str(pfmri), ""))

        for entry in processdict:
                man, retrievals = merge_fmris(source_list,
                    processdict[entry], variant_list, variants)

                # Determine total bytes to retrieve for this package; this must
                # be done using the retrievals dict since they are coalesced by
                # hash.
                getbytes = sum(
                    misc.get_pkg_otw_size(a)
                    for i, uri in enumerate(source_list)
                    for a in retrievals[i]
                )

                # Determine total bytes to send for this package; this must be
                # done using the manifest since retrievals are coalesced based
                # on hash, but sends are not.
                sendbytes = sum(
                    int(a.attrs.get("pkg.size", 0))
                    for a in man.gen_actions()
                )

                f = man.fmri

                tracker.republish_start_pkg(f, getbytes=getbytes,
                    sendbytes=sendbytes)

                if dry_run:
                        # Dry-run; attempt a merge of everything but don't
                        # write any data or publish packages.
                        continue

                target_pub.prefix = f.publisher

                # Retrieve package data from each package source.
                for i, uri in enumerate(source_list):
                        pub.repository.origins = [uri]
                        mfile = xport.multi_file_ni(pub, pkg_tmpdir,
                            decompress=True, progtrack=tracker)
                        for a in retrievals[i]:
                                mfile.add_action(a)
                        mfile.wait_files()

                trans_id = get_basename(f)
                pkg_name = f.get_fmri()
                pubs.add(target_pub.prefix)
                # Publish merged package.
                t = trans.Transaction(dest_repo,
                    pkg_name=pkg_name, trans_id=trans_id,
                    xport=dest_xport, pub=target_pub,
                    progtrack=tracker)

                # Remove any previous failed attempt
                # to republish this package.
                try:
                        t.close(abandon=True)
                except:
                        # It might not exist already.
                        pass

                t.open()
                for a in man.gen_actions():
                        if (a.name == "set" and
                            a.attrs["name"] == "pkg.fmri"):
                                # To be consistent with the
                                # server, the fmri can't be
                                # added to the manifest.
                                continue

                        if hasattr(a, "hash"):
                                fname = os.path.join(pkg_tmpdir,
                                    a.hash)
                                a.data = lambda: open(
                                    fname, "rb")
                        t.add(a)

                # Always defer catalog update.
                t.close(add_to_catalog=False)

                # Done with this package.
                tracker.republish_end_pkg(f)

                # Dump retrieved package data after each republication and
                # recreate the directory for the next package.
                shutil.rmtree(pkg_tmpdir)
                os.mkdir(pkg_tmpdir)
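
For reference, get_basename builds the transaction id as the package timestamp in epoch seconds, an underscore, and the URL-quoted FMRI (safe="" quotes even "/" and ":"). A worked value for an invented FMRI:

# pfmri: pkg://example/foo@1.0,5.11-0:20120101T000000Z
# calendar.timegm(...) -> 1325376000 (2012-01-01T00:00:00Z)
# urllib.quote(str(pfmri), "") quotes every reserved character, so
# get_basename returns:
#   "1325376000_pkg%3A%2F%2Fexample%2Ffoo%401.0%2C5.11-0%3A20120101T000000Z"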