Example #1
def _write_mtime_cache(mtimes, data, location):
    old_umask = os.umask(0o113)
    try:
        f = None
        logger.debug("attempting to update mtime cache at %r", location)
        try:
            if not ensure_dirs(os.path.dirname(location),
                               gid=portage_gid, mode=0o775):
                # bugger, can't update..
                return
            f = AtomicWriteFile(location, gid=portage_gid, perms=0o664)
            # invert the data...
            rev_data = {}
            for pkg, ver_dict in data.items():
                for fullver, virtuals in ver_dict.items():
                    for virtual in virtuals:
                        rev_data.setdefault(virtual.category, []).extend(
                            (pkg, fullver, str(virtual)))
            for cat, mtime in mtimes.items():
                if cat in rev_data:
                    f.write("%s\t%i\t%s\n" % (cat, mtime,
                         '\t'.join(rev_data[cat])))
                else:
                    f.write("%s\t%i\n" % (cat, mtime))
            f.close()
            os.chown(location, -1, portage_gid)
        except IOError as e:
            if f is not None:
                f.discard()
            if e.errno != errno.EACCES:
                raise
            logger.warning("unable to update vdb virtuals cache due to "
                "lacking permissions")
    finally:
        os.umask(old_umask)
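Every example on this page follows the same contract: AtomicWriteFile writes
to a temporary file and only renames it over the target when close() is
called, while discard() throws the temporary file away. A minimal sketch of
that protocol, assuming the snakeoil.fileutils import path these projects
build on:

from snakeoil.fileutils import AtomicWriteFile

def write_atomically(path, lines):
    f = AtomicWriteFile(path)  # writes go to a temp file next to `path`
    try:
        f.writelines(f"{line}\n" for line in lines)
        f.close()    # atomic rename: readers never observe a partial file
    except Exception:
        f.discard()  # drop the temp file; the old `path` stays untouched
        raise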
Example #2
def update_use_local_desc(repo, observer):
    """Update a repo's local USE flag description cache (profiles/use.local.desc)"""
    ret = 0
    use_local_desc = pjoin(repo.location, "profiles", "use.local.desc")
    f = None

    def _raise_xml_error(exc):
        observer.error(f'{cat}/{pkg}: failed parsing metadata.xml: {str(exc)}')
        nonlocal ret
        ret = 1

    try:
        f = AtomicWriteFile(use_local_desc)
        f.write(
            textwrap.dedent('''\
            # This file is deprecated as per GLEP 56 in favor of metadata.xml.
            # Please add your descriptions to your package's metadata.xml ONLY.
            # * generated automatically using pmaint *\n\n'''))
        with patch('pkgcore.log.logger.error', _raise_xml_error):
            for cat, pkgs in sorted(repo.packages.items()):
                for pkg in sorted(pkgs):
                    metadata = repo._get_metadata_xml(cat, pkg)
                    for flag, desc in sorted(metadata.local_use.items()):
                        f.write(f'{cat}/{pkg}:{flag} - {desc}\n')
        f.close()
    except IOError as e:
        observer.error(
            f"Unable to update use.local.desc file {use_local_desc!r}: {e.strerror}"
        )
        ret = os.EX_IOERR
    finally:
        if f is not None:
            f.discard()

    return ret
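Note the cleanup idiom here: discard() sits in the finally block rather than
in an except handler. That only works if discard() is a no-op once close()
has already committed the file, which snakeoil's implementation appears to
guarantee given how consistently the pattern recurs in the examples below.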
Example #3
def _write_cache_file(path, data, uid=-1, gid=-1):
    """Write a new cache file."""
    cachefile = None
    try:
        try:
            cachefile = AtomicWriteFile(path,
                                        binary=False,
                                        perms=0o664,
                                        uid=uid,
                                        gid=gid)
            cachefile.write(CACHE_HEADER + "\n")
            for (module, mtime), plugs in sorted(data.items(),
                                                 key=operator.itemgetter(0)):
                plugs = sort_plugs(plugs)
                plugs = ':'.join(f'{plug.key},{plug.priority},{plug.target}'
                                 for plug in plugs)
                cachefile.write(f'{module}:{mtime}:{plugs}\n')
            cachefile.close()
        except EnvironmentError as e:
            # We cannot write a new cache. We should log this
            # since it will have a performance impact.

            # Use error, not exception for this one: the traceback
            # is not necessary and too alarming.
            logger.error(
                'Cannot write cache for %s: %s. '
                'Try running pplugincache.', path, e)
    finally:
        if cachefile is not None:
            cachefile.discard()
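Each entry line written above has the shape module:mtime:plugs, where plugs
is itself colon-separated with comma-separated fields inside each plugin. A
hypothetical reader for that format (illustrative only, not pkgcore's actual
parsing code):

def parse_cache_line(line):
    # 'module:mtime:key,priority,target:key,priority,target:...'
    module, mtime, *plugs = line.rstrip('\n').split(':')
    return module, int(mtime), [tuple(p.split(',')) for p in plugs]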
Example #4
def update_pkg_desc_index(repo, observer):
    """Update a repo's package description cache (metadata/pkg_desc_index)"""
    ret = 0
    pkg_desc_index = pjoin(repo.location, "metadata", "pkg_desc_index")
    f = None
    try:
        f = AtomicWriteFile(pkg_desc_index)
        for cat, pkgs in sorted(repo.packages.items()):
            for pkg in sorted(pkgs):
                cpvs = sorted(
                    CPV(cat, pkg, v) for v in repo.versions[(cat, pkg)])
                # get the most recent pkg description, skipping bad pkgs
                for cpv in reversed(cpvs):
                    try:
                        desc = repo[(cat, pkg, cpv.fullver)].description
                        versions = ' '.join(x.fullver for x in cpvs)
                        f.write(f"{cat}/{pkg} {versions}: {desc}\n")
                        break
                    except MetadataException:
                        # already caught and reported during cache regen
                        ret = 1
        f.close()
    except IOError as e:
        observer.error(
            f"Unable to update pkg_desc_index file {pkg_desc_index!r}: {e.strerror}"
        )
        ret = os.EX_IOERR
    finally:
        if f is not None:
            f.discard()

    return ret
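Since cpvs is sorted in ascending version order, reversed(cpvs) tries the
newest version first and only falls back to older ones when a package's
metadata is broken, so the index always carries the best available
description.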
Example #5
def update_ldso(ld_search_path, offset='/'):
    # build the new contents in a temp file and atomically rename it into
    # place, instead of racing readers with an in-place open/write
    fp = pjoin(offset, 'etc', 'ld.so.conf')
    new_f = AtomicWriteFile(
        fp, uid=os_data.root_uid, gid=os_data.root_gid, perms=0o644)
    new_f.write("# automatically generated, edit env.d files instead\n")
    new_f.writelines(x.strip()+"\n" for x in ld_search_path)
    new_f.close()
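The comment is worth unpacking: rewriting /etc/ld.so.conf in place would
briefly expose a truncated search path to anything reading the file
mid-write. Building the contents in a temp file and renaming it over the old
one makes the switch a single atomic step, so readers see either the old
list or the new one, never a partial file.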
Example #6
    def write_cache(self, data: dict) -> None:
        """
        Write @data to the cache file, if one is specified.
        """

        if self.args.cache_file is not None:
            with AtomicWriteFile(self.args.cache_file) as f:
                json.dump(data, f, indent=2)
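This is the most compact form, but note the failure semantics: if
AtomicWriteFile's __exit__ simply calls close(), an exception raised inside
json.dump() could still commit a half-written file. The explicit pattern
used elsewhere on this page never does; a sketch of the equivalent (the
function name is illustrative):

import json
from snakeoil.fileutils import AtomicWriteFile

def write_cache_explicit(cache_file, data):
    f = AtomicWriteFile(cache_file)
    try:
        json.dump(data, f, indent=2)
        f.close()    # commit via rename
    except Exception:
        f.discard()  # drop the temp file instead of committing it
        raise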
Example #7
    def flush(self):
        f = None
        try:
            f = AtomicWriteFile(self.path, gid=self.gid, perms=self.mode)
            f.write("\n".join(str(x) for x in sorted(self._atoms)))
            f.close()
        except BaseException:
            # make sure a partial write never replaces the existing file
            if f is not None:
                f.discard()
            raise
Example #8
def update_ldso(ld_search_path, offset='/'):
    # build the new contents in a temp file and atomically rename it into
    # place, instead of racing readers with an in-place open/write
    fp = pjoin(offset, 'etc', 'ld.so.conf')
    new_f = AtomicWriteFile(fp, uid=os_data.root_uid, gid=os_data.root_gid, perms=0o644)
    new_f.write("# automatically generated, edit env.d files instead\n")
    new_f.writelines(x.strip()+"\n" for x in ld_search_path)
    new_f.close()
Example #9
    def _get_fd(self, write=False):
        if isinstance(self._source, str):
            if write:
                return AtomicWriteFile(self._source, uid=os_data.root_uid,
                                       gid=os_data.root_gid, perms=0o644)
            return readlines_ascii(self._source, True)
        # not a filesystem path: ask the source object for a file object
        fobj = self._source.text_fileobj(writable=write)
        if write:
            # rewrite from the start
            fobj.seek(0, 0)
            fobj.truncate(0)
        return fobj
Example #10
    def add(self, record):
        # lock around the rewrite so concurrent writers can't clobber each other
        lock_file = '%s.lock' % self.fname
        with open(lock_file, 'w') as lock_fp:
            lockf(lock_fp, LOCK_EX)
            try:  # make sure the lock is released if something goes wrong
                self._init()  # refresh the object so we operate on the latest data
                taken = []
                for i in range(len(self)):
                    if os.path.basename(record.delta.fname) == self[i].delta.fname:
                        taken.append(i)
                # delete from the end so the earlier indices stay valid
                for i in reversed(taken):
                    del self[i]
                self.append(record)
                fp = AtomicWriteFile(self.fname)
                fp.write('\n--\n'.join(map(str, self)))
                fp.close()
            finally:
                lockf(lock_fp, LOCK_UN)
                try:
                    os.remove(lock_file)
                except OSError:
                    pass
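One gap worth noting: if fp.write() fails, nothing calls fp.discard(), so
the temporary file AtomicWriteFile created may be left behind (the target
file itself is never corrupted). A sketch of the guarded version of that
inner write, following the idiom the other examples use:

                fp = AtomicWriteFile(self.fname)
                try:
                    fp.write('\n--\n'.join(map(str, self)))
                    fp.close()
                except Exception:
                    fp.discard()
                    raise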
Example #11
    def _write_data(self):
        handler = None
        try:
            handler = AtomicWriteFile(self._location)
            self._serialize_to_handle(list(self.data.items()), handler)
            handler.close()
        except PermissionError as e:
            logger.error(
                f'failed writing binpkg cache to {self._location!r}: {e}')
        finally:
            if handler is not None:
                handler.discard()
Example #12
def update_keywords_in_file(path: Path,
                            keywords: typing.Iterable[str],
                            stable: bool
                            ) -> None:
    """
    Update KEYWORDS entry in the file at @path.  @keywords specifies
    a list of keywords, @stable indicates whether they should be stable
    or ~arch.

    Raises KeywordsNotFound if no suitable KEYWORDS variable is found.
    """

    with open(path, 'r') as f:
        data = f.readlines()

    for i, l in enumerate(data):
        m = KEYWORDS_RE.match(l)
        if m is None:
            continue

        kw = update_keywords(m.group('keywords').split(),
                             keywords, stable=stable)
        if kw is None:
            # keywords are already in the requested state; nothing to do
            return

        new_kw = ' '.join(kw)
        # add quotes if there were none before
        if not m.group('quote'):
            new_kw = f'"{new_kw}"'
        data[i] = f'{m.group("pre")}{new_kw}{m.group("post")}\n'
        break
    else:
        # no KEYWORDS assignment matched anywhere in the file
        raise KeywordsNotFound()

    # update copyright if necessary
    data[0] = update_copyright(data[0])

    with AtomicWriteFile(path) as f:
        f.writelines(data)
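KEYWORDS_RE is defined elsewhere in the project; a regex providing the named
groups the code above relies on (pre, quote, keywords, post) might look like
this. An illustrative guess, not the project's actual pattern:

import re

KEYWORDS_RE = re.compile(
    r'(?P<pre>\s*KEYWORDS=(?P<quote>"?))'  # any opening quote stays in "pre"
    r'(?P<keywords>[^"\n]*)'
    r'(?P<post>(?P=quote).*)')             # the matching close quote starts "post"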
Example #13
def perform_env_update(root, skip_ldso_update=False):
    d, inc, colon = collapse_envd(pjoin(root, "etc/env.d"))

    l = d.pop("LDPATH", None)
    if l is not None and not skip_ldso_update:
        update_ldso(l, root)

    string_collapse_envd(d, inc, colon)

    new_f = AtomicWriteFile(pjoin(root, "etc", "profile.env"),
                            uid=os_data.root_uid,
                            gid=os_data.root_gid,
                            perms=0o644)
    new_f.write("# autogenerated.  update env.d instead\n")
    new_f.writelines('export %s="%s"\n' % (k, d[k]) for k in sorted(d))
    new_f.close()
    new_f = AtomicWriteFile(pjoin(root, "etc", "profile.csh"),
                            uid=os_data.root_uid,
                            gid=os_data.root_gid,
                            perms=0o644)
    new_f.write("# autogenerated, update env.d instead\n")
    new_f.writelines('setenv %s="%s"\n' % (k, d[k]) for k in sorted(d))
    new_f.close()
Example #14
class http_syncer(base.Syncer):
    """Syncer that fetches files over HTTP(S)."""

    forcable = True

    def __init__(self, basedir, uri, dest=None, **kwargs):
        self.basename = os.path.basename(uri)
        super().__init__(basedir, uri, **kwargs)

    def _sync(self, verbosity, output_fd, force=False, **kwargs):
        dest = self._pre_download()

        if self.uri.lower().startswith('https://'):
            # default to using system ssl certs; the default purpose
            # (SERVER_AUTH) is the right one for a client verifying a server
            context = ssl.create_default_context()
        else:
            context = None

        headers = {}
        etag_path = pjoin(self.basedir, '.etag')
        modified_path = pjoin(self.basedir, '.modified')

        if not force:
            # use cached ETag to check if updates exist
            previous_etag = readfile_ascii(etag_path, none_on_missing=True)
            if previous_etag:
                headers['If-None-Match'] = previous_etag

            # use cached modification timestamp to check if updates exist
            previous_modified = readfile_ascii(modified_path,
                                               none_on_missing=True)
            if previous_modified:
                headers['If-Modified-Since'] = previous_modified

        req = urllib.request.Request(self.uri, headers=headers, method='GET')

        # TODO: add customizable timeout
        try:
            resp = urllib.request.urlopen(req, context=context)
        except urllib.error.URLError as e:
            # only HTTPError instances carry a status code
            if getattr(e, 'code', None) == 304:  # Not Modified
                logger.debug("content is unchanged")
                return True
            raise base.SyncError(
                f'failed fetching {self.uri!r}: {e.reason}') from e

        # Manually check cached values ourselves since some servers appear to
        # ignore If-None-Match or If-Modified-Since headers.
        convert = lambda x: x.strip() if x else None
        etag = resp.getheader('ETag')
        modified = resp.getheader('Last-Modified')
        if not force:
            if etag is not None and convert(etag) == convert(previous_etag):
                logger.debug(f"etag {etag} is equal, no update available")
                return True
            if modified is not None and convert(modified) == convert(
                    previous_modified):
                logger.debug(f"header mtime is unmodified: {modified}")
                return True

        try:
            os.makedirs(self.basedir, exist_ok=True)
        except OSError as e:
            raise base.SyncError(
                f'failed creating repo dir {self.basedir!r}: {e.strerror}'
            ) from e

        length = resp.getheader('content-length')
        if length:
            length = int(length)
            blocksize = max(4096, length // 100)
        else:
            blocksize = 1000000

        try:
            self._download = AtomicWriteFile(dest, binary=True, perms=0o644)
        except OSError as e:
            raise base.PathError(self.basedir, e.strerror) from e

        # retrieve the file while providing simple progress output
        size = 0
        while True:
            buf = resp.read(blocksize)
            if not buf:
                if length:
                    sys.stdout.write('\n')
                break
            self._download.write(buf)
            size += len(buf)
            if length:
                sys.stdout.write('\r')
                progress = '=' * int(size / length * 50)
                percent = int(size / length * 100)
                sys.stdout.write("[%-50s] %d%%" % (progress, percent))

        self._post_download(dest)

        # TODO: store this in pkgcore cache dir instead?
        # update cached ETag/Last-Modified values
        if etag:
            with open(etag_path, 'w') as f:
                f.write(etag)
        if modified:
            with open(modified_path, 'w') as f:
                f.write(modified)

        return True

    def _pre_download(self):
        """Pre-download initialization.

        Returns file path to download file to.
        """
        return pjoin(self.basedir, self.basename)

    def _post_download(self, path):
        """Post-download file processing.

        Args:
            path (str): path to downloaded file
        """
        # atomically create file
        self._download.close()
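The caching dance in _sync() boils down to a conditional GET: send the saved
validators and treat 304 as "nothing to do". A standalone sketch using only
urllib (the function name and return shape are illustrative):

import urllib.error
import urllib.request

def fetch_if_changed(uri, previous_etag=None):
    headers = {'If-None-Match': previous_etag} if previous_etag else {}
    req = urllib.request.Request(uri, headers=headers, method='GET')
    try:
        resp = urllib.request.urlopen(req)
    except urllib.error.HTTPError as e:
        if e.code == 304:  # Not Modified: the cached copy is still current
            return None, previous_etag
        raise
    return resp.read(), resp.getheader('ETag')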
Example #15
    def _sync(self, verbosity, output_fd, force=False, **kwargs):
        dest = self._pre_download()

        if self.uri.startswith('https://'):
            # default to using system ssl certs; the default purpose
            # (SERVER_AUTH) is the right one for a client verifying a server
            context = ssl.create_default_context()
        else:
            context = None

        headers = {}
        etag_path = pjoin(self.basedir, '.etag')
        modified_path = pjoin(self.basedir, '.modified')

        if not force:
            # use cached ETag to check if updates exist
            previous_etag = None
            try:
                with open(etag_path, 'r') as f:
                    previous_etag = f.read()
            except FileNotFoundError:
                pass
            if previous_etag:
                headers['If-None-Match'] = previous_etag

            # use cached modification timestamp to check if updates exist
            previous_modified = None
            try:
                with open(modified_path, 'r') as f:
                    previous_modified = f.read()
            except FileNotFoundError:
                pass
            if previous_modified:
                headers['If-Modified-Since'] = previous_modified

        req = urllib.request.Request(self.uri, headers=headers, method='GET')

        # TODO: add customizable timeout
        try:
            resp = urllib.request.urlopen(req, context=context)
        except urllib.error.URLError as e:
            # only HTTPError instances carry a status code
            if getattr(e, 'code', None) == 304:
                # TODO: raise exception to notify user the repo is up to date?
                return True
            raise base.SyncError(
                f'failed fetching {self.uri!r}: {e.reason}') from e

        # Manually check cached values ourselves since some servers appear to
        # ignore If-None-Match or If-Modified-Since headers.
        etag = resp.getheader('ETag', '')
        modified = resp.getheader('Last-Modified', '')
        if not force:
            if etag == previous_etag:
                return True
            if modified == previous_modified:
                return True

        try:
            os.makedirs(self.basedir, exist_ok=True)
        except OSError as e:
            raise base.SyncError(
                f'failed creating repo dir {self.basedir!r}: {e.strerror}'
            ) from e

        length = resp.getheader('content-length')
        if length:
            length = int(length)
            blocksize = max(4096, length // 100)
        else:
            blocksize = 1000000

        try:
            self._download = AtomicWriteFile(dest, binary=True, perms=0o644)
        except OSError as e:
            raise base.PathError(self.basedir, e.strerror) from e

        # retrieve the file while providing simple progress output
        size = 0
        while True:
            buf = resp.read(blocksize)
            if not buf:
                if length:
                    sys.stdout.write('\n')
                break
            self._download.write(buf)
            size += len(buf)
            if length:
                sys.stdout.write('\r')
                progress = '=' * int(size / length * 50)
                percent = int(size / length * 100)
                sys.stdout.write("[%-50s] %d%%" % (progress, percent))

        self._post_download(dest)

        # TODO: store this in pkgcore cache dir instead?
        # update cached ETag/Last-Modified values
        if etag:
            with open(etag_path, 'w') as f:
                f.write(etag)
        if modified:
            with open(modified_path, 'w') as f:
                f.write(modified)

        return True
Example #16
def perform_env_update(root, skip_ldso_update=False):
    d, inc, colon = collapse_envd(pjoin(root, "etc/env.d"))

    l = d.pop("LDPATH", None)
    if l is not None and not skip_ldso_update:
        update_ldso(l, root)

    string_collapse_envd(d, inc, colon)

    new_f = AtomicWriteFile(
        pjoin(root, "etc", "profile.env"),
        uid=os_data.root_uid, gid=os_data.root_gid, perms=0o644)
    new_f.write("# autogenerated.  update env.d instead\n")
    new_f.writelines('export %s="%s"\n' % (k, d[k]) for k in sorted(d))
    new_f.close()
    new_f = AtomicWriteFile(
        pjoin(root, "etc", "profile.csh"),
        uid=os_data.root_uid, gid=os_data.root_gid, perms=0o644)
    new_f.write("# autogenerated, update env.d instead\n")
    new_f.writelines('setenv %s="%s"\n' % (k, d[k]) for k in sorted(d))
    new_f.close()
Example #17
    def _sync(self, verbosity, output_fd, force=False, **kwargs):
        dest = self._pre_download()

        if self.uri.startswith('https://'):
            # default to using system ssl certs; the default purpose
            # (SERVER_AUTH) is the right one for a client verifying a server
            context = ssl.create_default_context()
        else:
            context = None

        headers = {}
        etag_path = pjoin(self.basedir, '.etag')
        modified_path = pjoin(self.basedir, '.modified')

        if not force:
            # use cached ETag to check if updates exist
            previous_etag = None
            try:
                with open(etag_path, 'r') as f:
                    previous_etag = f.read()
            except FileNotFoundError:
                pass
            if previous_etag:
                headers['If-None-Match'] = previous_etag

            # use cached modification timestamp to check if updates exist
            previous_modified = None
            try:
                with open(modified_path, 'r') as f:
                    previous_modified = f.read()
            except FileNotFoundError:
                pass
            if previous_modified:
                headers['If-Modified-Since'] = previous_modified

        req = urllib.request.Request(self.uri, headers=headers, method='GET')

        # TODO: add customizable timeout
        try:
            resp = urllib.request.urlopen(req, context=context)
        except urllib.error.URLError as e:
            # only HTTPError instances carry a status code
            if getattr(e, 'code', None) == 304:
                # TODO: raise exception to notify user the repo is up to date?
                return True
            raise base.SyncError(f'failed fetching {self.uri!r}: {e.reason}') from e

        # Manually check cached values ourselves since some servers appear to
        # ignore If-None-Match or If-Modified-Since headers.
        etag = resp.getheader('ETag', '')
        modified = resp.getheader('Last-Modified', '')
        if not force:
            if etag == previous_etag:
                return True
            if modified == previous_modified:
                return True

        try:
            os.makedirs(self.basedir, exist_ok=True)
        except OSError as e:
            raise base.SyncError(
                f'failed creating repo dir {self.basedir!r}: {e.strerror}') from e

        length = resp.getheader('content-length')
        if length:
            length = int(length)
            blocksize = max(4096, length//100)
        else:
            blocksize = 1000000

        try:
            self._download = AtomicWriteFile(dest, binary=True, perms=0o644)
        except OSError as e:
            raise base.PathError(self.basedir, e.strerror) from e

        # retrieve the file while providing simple progress output
        size = 0
        while True:
            buf = resp.read(blocksize)
            if not buf:
                if length:
                    sys.stdout.write('\n')
                break
            self._download.write(buf)
            size += len(buf)
            if length:
                sys.stdout.write('\r')
                progress = '=' * int(size / length * 50)
                percent = int(size / length * 100)
                sys.stdout.write("[%-50s] %d%%" % (progress, percent))

        self._post_download(dest)

        # TODO: store this in pkgcore cache dir instead?
        # update cached ETag/Last-Modified values
        if etag:
            with open(etag_path, 'w') as f:
                f.write(etag)
        if modified:
            with open(modified_path, 'w') as f:
                f.write(modified)

        return True