Example #1
0
 def test_demandload(self):
     """Scope placeholder is swapped for the real module on first attribute access."""
     scope = {}
     demandload.demandload(scope, 'snakeoil:demandload')
     # before any attribute access the scope holds a lazy placeholder,
     # not the real module
     self.assertNotIdentical(demandload, scope['demandload'])
     # attribute access triggers the actual import...
     self.assertIdentical(
         demandload.demandload, scope['demandload'].demandload)
     # ...after which the scope entry has been replaced by the real module
     self.assertIdentical(demandload, scope['demandload'])
Example #2
0
# Copyright: 2006-2011 Brian Harring <*****@*****.**>
# License: GPL2/BSD

__all__ = ("rsync_syncer", "rsync_timestamp_syncer",)

from snakeoil.demandload import demandload

from pkgcore.config import ConfigHint
from pkgcore.sync import base

demandload(
    'errno',
    'os',
    'socket',
    'tempfile',
    'time',
    'snakeoil.compatibility:raise_from',
    'snakeoil.osutils:pjoin',
)


class rsync_syncer(base.ExternalSyncer):

    default_excludes = ['/distfiles', '/local', '/packages']
    default_includes = []
    default_conn_timeout = 15
    default_opts = [
        '--recursive',
        '--delete',
        '--delete-delay',
        '--perms',
Example #3
0
    "describe_class", "describe_class_main",
    "configurables", "configurables_main",
    "dump_uncollapsed", "dump_uncollapsed_main"
)

from functools import partial

from snakeoil.demandload import demandload

from pkgcore.config import errors, basics
from pkgcore.ebuild import atom
from pkgcore.plugin import get_plugins
from pkgcore.util import commandline

demandload(
    'textwrap',
    'traceback',
)


def dump_section(config, out):
    out.first_prefix.append('    ')
    out.write('# typename of this section: %s' % (config.type.name,))
    out.write('class %s.%s;' % (config.type.callable.__module__,
                                config.type.callable.__name__))
    if config.default:
        out.write('default true;')
    for key, val in sorted(config.config.iteritems()):
        typename = config.type.types.get(key)
        if typename is None:
            if config.type.allow_unknowns:
                typename = 'str'
Example #4
0
    "uninstall",
    "replace",
    "fetch_base",
    "empty_build_op",
    "FailedDirectory",
    "GenericBuildError",
    "errors",
)

from pkgcore import operations as _operations_mod
from snakeoil.dependant_methods import ForcedDepends
from snakeoil import klass

from snakeoil import demandload

demandload.demandload(globals(), "pkgcore:fetch@_fetch_module", "snakeoil.lists:iflatten_instance")


class fetch_base(object):
    def __init__(self, domain, pkg, fetchables, fetcher):
        self.verified_files = {}
        self._basenames = set()
        self.domain = domain
        self.pkg = pkg
        self.fetchables = fetchables
        self.fetcher = fetcher

    def fetch_all(self, observer):
        for fetchable in self.fetchables:
            if not self.fetch_one(fetchable, observer):
                return False
Example #5
0
import os
import signal

from pkgcore import const, os_data
from pkgcore.ebuild import const as e_const
import pkgcore.spawn

from snakeoil import klass
from snakeoil.currying import pretty_docs
from snakeoil.demandload import demandload
from snakeoil.osutils import abspath, normpath, pjoin
from snakeoil.weakrefs import WeakRefFinalizer

demandload(
    'traceback',
    'snakeoil:fileutils',
    'pkgcore.log:logger',
)


def _single_thread_allowed(functor):
    """Decorator serializing calls to functor via the global ebp lock.

    The lock is always released, even if functor raises.  The original
    callable remains reachable via the ``func`` attribute.
    """
    def _locked(*args, **kwargs):
        _acquire_global_ebp_lock()
        try:
            return functor(*args, **kwargs)
        finally:
            _release_global_ebp_lock()
    # expose the unwrapped callable for callers that need to bypass locking
    _locked.func = functor
    pretty_docs(_locked, name=functor.__name__)
    return _locked
Example #6
0
# License: GPL2/BSD

"""
repository modifications (installing, removing, replacing)
"""

__all__ = ("Failure", "base", "install", "uninstall", "replace")

from snakeoil.dependant_methods import ForcedDepends
from snakeoil.weakrefs import WeakRefFinalizer
from snakeoil.demandload import demandload
demandload(globals(), "pkgcore.log:logger",
    "pkgcore.merge.engine:MergeEngine",
    "pkgcore.merge:errors@merge_errors",
    "pkgcore.package.mutated:MutatedPkg",
    'errno',
    "shutil",
    "tempfile",
    'snakeoil:osutils',
    )


class fake_lock(object):
    """No-op stand-in implementing the lock interface.

    Every acquire/release method succeeds immediately and returns None.
    """

    def __init__(self):
        pass

    def acquire_write_lock(self):
        pass

    def acquire_read_lock(self):
        pass

    def release_read_lock(self):
        pass

    def release_write_lock(self):
        pass


class finalizer_base(WeakRefFinalizer, ForcedDepends):
Example #7
0
    @copyright: 2012-2015 by Brian Dolbec <*****@*****.**>
    @copyright: 2014-2015 by Pavlos Ratis <*****@*****.**>
    @license: GNU GPL2, see COPYING for details.
"""

import os
import re

from snakeoil.demandload import demandload

from gkeys.gkey import GKEY
from gkeys.seed import Seeds, decoder

demandload(
    "json:load",
    "gkeys.exception:UpdateDbError",
    "gkeys.fileops:ensure_dirs",
)


class SeedHandler(object):

    def __init__(self, logger, config):
        """Set up seed handling state.

        :param logger: logging instance used by this handler
        :param config: gkeys configuration object
        """
        self.config = config
        self.logger = logger
        # full 40-hex-digit fingerprint form
        self.fingerprint_re = re.compile('[0-9A-Fa-f]{40}')
        # grouped form: ten 4-hex-digit blocks separated by spaces.
        # "finerprint_re2" is a historical typo; keep it for backward
        # compatibility but also expose the correctly-spelled alias.
        self.fingerprint_re2 = self.finerprint_re2 = \
            re.compile('[0-9A-Fa-f]{4}( [0-9A-Fa-f]{4}){9}')
        self.seeds = None


    def new(self, args, checkgkey=False):
Example #8
0
pkgset based around loading a list of atoms from a world file
"""

__all__ = ("FileList", "WorldFile")

from snakeoil import compatibility, klass
from snakeoil.demandload import demandload

from pkgcore.config import ConfigHint, errors
from pkgcore.ebuild import const
from pkgcore.ebuild.atom import atom
from pkgcore.package.errors import InvalidDependency

demandload(
    'snakeoil.fileutils:AtomicWriteFile,readlines_ascii',
    'pkgcore:os_data',
    'pkgcore.log:logger',
)


class FileList(object):
    pkgcore_config_type = ConfigHint({'location': 'str'}, typename='pkgset')
    error_on_subsets = True

    def __init__(self, location, gid=os_data.portage_gid, mode=0o644):
        """Set up the file-backed package set.

        :param location: path of the file holding the atom list
        :param gid: group id applied when writing the file
        :param mode: permission bits for the file (default 0o644)
        """
        # 0o644 replaces the py2-only literal 0644, which is a
        # SyntaxError under Python 3; 0o644 is valid on py2.6+ as well.
        self.path = location
        self.gid = gid
        self.mode = mode
        # note that _atoms is generated on the fly.

    @klass.jit_attr
Example #9
0
"""
XPAK container support
"""

__all__ = ("MalformedXpak", "Xpak")

from collections import OrderedDict

from snakeoil import compatibility, klass
from snakeoil import struct_compat as struct
from snakeoil.compatibility import raise_from
from snakeoil.demandload import demandload
from snakeoil.mappings import autoconvert_py3k_methods_metaclass

demandload(
    "errno",
    "os",
)

# format is:
# XPAKPACKIIIIDDDD[index][data]XPAKSTOPOOOOSTOP
# first; all ints/longs are big endian
# meanwhile, 8 byte format magic
# 4 bytes of index len,
# 4 bytes of data len
# index items: 4 bytes (len of the key name), then that length of key data
#   finally, 2 longs; relative offset from data block start, length of the data
#   repeats until the index is fully processed
# for data, just a big blob; offsets into it are determined via the index
#   table.
# finally, trailing magic, 4 bytes (positive) of the # of bytes to seek to
#   reach the end of the magic, and 'STOP'.  offset is relative to EOS for Xpak
Example #10
0
# Copyright: 2006 Brian Harring <*****@*****.**>
# License: BSD/GPL2

from itertools import ifilterfalse, chain, groupby
from operator import attrgetter, itemgetter

from pkgcore.fetch import fetchable
from snakeoil import mappings
from snakeoil.demandload import demandload

from pkgcheck import base, addons

demandload(
    'os',
    'snakeoil.osutils:listdir_dirs,listdir_files,pjoin',
    'snakeoil.sequences:iflatten_instance',
    'snakeoil.strings:pluralism',
    'pkgcore:fetch',
    'pkgcore.ebuild.profiles:ProfileStack',
)


class UnusedGlobalFlags(base.Warning):
    """Unused use.desc flag(s)."""

    __slots__ = ("flags", )

    threshold = base.repository_feed

    def __init__(self, flags):
        super(UnusedGlobalFlags, self).__init__()
        # tricky, but it works; atoms have the same attrs
Example #11
0
# License: GPL2/BSD
"""gentoo ebuild specific base package class"""

__all__ = ("CPV", "versioned_CPV", "unversioned_CPV")

from itertools import izip

from snakeoil.compatibility import cmp
from snakeoil.demandload import demandload, demand_compile_regexp
from snakeoil.klass import inject_richcmp_methods_from_cmp

from pkgcore.ebuild.errors import InvalidCPV
from pkgcore.package import base

# do this to break the cycle.
demandload("pkgcore.ebuild:atom")

demand_compile_regexp('suffix_regexp', '^(alpha|beta|rc|pre|p)(\\d*)$')

suffix_value = {"pre": -2, "p": 1, "alpha": -4, "beta": -3, "rc": -1}

# while the package section looks fugly, there is a reason for it-
# to prevent version chunks from showing up in the package

demand_compile_regexp(
    'isvalid_version_re',
    r"^(?:\d+)(?:\.\d+)*[a-zA-Z]?(?:_(p(?:re)?|beta|alpha|rc)\d*)*$")

demand_compile_regexp('isvalid_cat_re',
                      r"^(?:[a-zA-Z0-9][-a-zA-Z0-9+._]*(?:/(?!$))?)+$")
Example #12
0
from __future__ import print_function

import os
import sys

if sys.version_info[0] >= 3:
    _unicode = str
else:
    _unicode = unicode

from snakeoil.demandload import demandload

demandload(
    "json:load",
    "gkeys.lib:GkeysGPG",
    "gkeys.keyhandler:KeyHandler",
)


class ActionBase(object):
    '''Base actions class holding comon functions and init'''
    def __init__(self, config, output=None, logger=None):
        """Store shared action state; heavyweight helpers start as None.

        :param config: gkeys configuration object
        :param output: optional output callable/stream used by actions
        :param logger: optional logging instance
        """
        self.config = config
        self.output = output
        self.logger = logger
        self.seeds = None
        # lazily-created handlers/gpg instance; populated on first use
        self._seedhandler = None
        self._keyhandler = None
        self._gpg = None
        self.category = None
Example #13
0
conditional attributes on a package.

Changing them triggering regen of other attributes on the package instance.
"""

__all__ = ("make_wrapper", )

from functools import partial
from operator import attrgetter

from snakeoil.containers import LimitedChangeSet, Unchangable
from snakeoil.demandload import demandload

from pkgcore.package.base import wrapper

demandload("copy:copy", )


def _getattr_wrapped(attr, self):
    o = self._cached_wrapped.get(attr)
    if o is None or o[0] != self._reuse_pt:
        o = self._wrapped_attr[attr](getattr(self._raw_pkg, attr),
                                     self._configurable)
        o = self._cached_wrapped[attr] = (self._reuse_pt, o)
    return o[1]


def make_wrapper(configurable_attribute_name,
                 attributes_to_wrap=(),
                 kls_injections={}):
    """
Example #14
0
from snakeoil import compatibility
from snakeoil.demandload import demandload
from snakeoil.osutils import listdir_files, pjoin, ensure_dirs, normpath

from pkgcore.config import ConfigHint
from pkgcore.merge import errors, const

demandload(
    'errno',
    'math:floor',
    'os',
    're',
    'time',
    'snakeoil.bash:iter_read_bash',
    'pkgcore:os_data,spawn',
    'pkgcore.fs:fs,contents',
    'pkgcore.fs.livefs:gen_obj',
    'pkgcore.operations.observer:threadsafe_repo_observer',
    'pkgcore.package.mutated:MutatedPkg',
    'pkgcore.plugin:get_plugin',
    'pkgcore.util:file_type,thread_pool',
)

UNINSTALLING_MODES = (const.REPLACE_MODE, const.UNINSTALL_MODE)
INSTALLING_MODES = (const.REPLACE_MODE, const.INSTALL_MODE)


class base(object):
    """base trigger class
Example #15
0
# Copyright: 2011 Brian Harring <*****@*****.**>
# License: GPL2/BSD 3 clause

import threading
import Queue

from snakeoil import compatibility
from snakeoil.demandload import demandload

demandload('snakeoil.process:get_proc_count', )


def reclaim_threads(threads):
    """Join the given threads, best-effort.

    Exceptions in compatibility.IGNORED_EXCEPTIONS (e.g. SystemExit,
    KeyboardInterrupt) are re-raised; any other failure from a join is
    deliberately swallowed so the remaining threads still get joined.

    :param threads: iterable of threading.Thread instances
    """
    for x in threads:
        try:
            x.join()
        except compatibility.IGNORED_EXCEPTIONS:
            raise
        except Exception:
            # fix: drop the unused "as e" binding; intentionally
            # best-effort, though logging here would be better
            pass


def map_async(iterable, functor, *args, **kwds):
    per_thread_args = kwds.pop("per_thread_args", lambda: ())
    per_thread_kwds = kwds.pop("per_thread_kwds", lambda: {})
    parallelism = kwds.pop("threads", None)
    if parallelism is None:
        parallelism = get_proc_count()

    if hasattr(iterable, '__len__'):
Example #16
0
__all__ = (
    "syncer_exception", "uri_exception", "generic_exception",
    "missing_local_user", "missing_binary", "syncer", "ExternalSyncer",
    "dvcs_syncer", "GenericSyncer", "DisabledSyncer",
    "AutodetectSyncer",
)

from snakeoil import compatibility
from snakeoil.demandload import demandload

from pkgcore.config import ConfigHint, configurable

demandload(
    'os',
    'pwd',
    'stat',
    'errno',
    'pkgcore:os_data,plugin,spawn',
)


class syncer_exception(Exception):
    """Base class for sync-related errors."""
    pass


class uri_exception(syncer_exception):
    """Sync error relating to the sync URI."""
    pass


class generic_exception(syncer_exception):
    """Catch-all syncer error."""
    pass
Example #17
0
Changing them triggering regen of other attributes on the package instance.
"""

__all__ = ("make_wrapper",)

from functools import partial
from operator import attrgetter

from snakeoil.containers import LimitedChangeSet, Unchangable
from snakeoil.demandload import demandload

from pkgcore.package.base import wrapper

demandload(
    "copy:copy",
)


def _getattr_wrapped(attr, self):
    o = self._cached_wrapped.get(attr)
    if o is None or o[0] != self._reuse_pt:
        o = self._wrapped_attr[attr](
            getattr(self._raw_pkg, attr),
            self._configurable,
            pkg=self)
        o = self._cached_wrapped[attr] = (self._reuse_pt, o)
    return o[1]


def make_wrapper(wrapped_repo, configurable_attribute_name, attributes_to_wrap=(),
Example #18
0
# Copyright: 2006-2011 Brian Harring <*****@*****.**>
# License: BSD/GPL2

from snakeoil.demandload import demandload

from pkgcore.util import commandline

demandload(
    'snakeoil:osutils,currying',
    "pkgcore.ebuild:atom,conditionals,eapi",
    "pkgcore.restrictions.boolean:AndRestriction",
    "pkgcore.util:packages",
)


def str_pkg(pkg):
    """Return a printable cpv string for pkg, unwrapping any wrappers.

    Old-style virtuals are special-cased: the underlying pkg is taken
    from ``actual_pkg`` when present, otherwise derived from the
    "=cat/pkg-ver" form of its rdepends.
    """
    pkg = packages.get_raw_pkg(pkg)
    # special casing; old style virtuals come through as the original pkg.
    if pkg.package_is_real:
        return pkg.cpvstr
    if hasattr(pkg, "actual_pkg"):
        return pkg.actual_pkg.cpvstr
    # icky, but works: strip the leading "=" off the rdepend atom string.
    return str(pkg.rdepends).lstrip("=")


def get_atom_kls(value):
    """Return the atom class for the given EAPI identifier.

    :param value: EAPI version string
    :raises ValueError: if the EAPI is unknown or unsupported
    """
    eapi_obj = eapi.get_eapi(value)
    if eapi_obj is not None:
        return eapi_obj.atom_kls
    raise ValueError("eapi %s isn't known/supported" % (value, ))
Example #19
0
gentoo ebuild atom, should be generalized into an agnostic base
"""

__all__ = ("atom", "transitive_use_atom", "generate_collapsed_restriction")

import string
from pkgcore.restrictions import values, packages, boolean
from pkgcore.ebuild import cpv, errors, const, restricts
from snakeoil.compatibility import is_py3k, cmp, raise_from
from snakeoil.klass import (generic_equality, inject_richcmp_methods_from_cmp,
    reflective_hash, alias_attr)
from snakeoil.demandload import demandload
from snakeoil.currying import partial
demandload(globals(),
    "pkgcore.restrictions.delegated:delegate",
    'pkgcore.restrictions.packages:Conditional,AndRestriction@PkgAndRestriction',
    'pkgcore.restrictions.values:ContainmentMatch',
    'collections:defaultdict',
)

# namespace compatibility...
MalformedAtom = errors.MalformedAtom

alphanum = set(string.digits)
if is_py3k:
    alphanum.update(string.ascii_letters)
else:
    alphanum.update(string.letters)

valid_repo_chars = set(alphanum)
valid_repo_chars.update("_-")
valid_use_chars = set(alphanum)
Example #20
0
# License: BSD/GPL2
"""Various argparse actions, types, and miscellaneous extensions."""

import argparse
from functools import partial
import os
import sys

from snakeoil import compatibility
from snakeoil.klass import patch
from snakeoil.demandload import demandload

demandload(
    'inspect',
    'logging',
    'textwrap:dedent',
    'snakeoil:osutils',
    'snakeoil.version:get_version',
    'snakeoil.sequences:split_negations',
)

# Enable flag to pull extended docs keyword args into arguments during doc
# generation, when disabled the keyword is silently discarded.
_generate_docs = False


@patch(argparse.ArgumentParser, 'add_subparsers')
@patch(argparse._SubParsersAction, 'add_parser')
@patch(argparse._ActionsContainer, 'add_mutually_exclusive_group')
@patch(argparse._ActionsContainer, 'add_argument_group')
@patch(argparse._ActionsContainer, 'add_argument')
def _add_argument_docs(orig_func, self, *args, **kwargs):
Example #21
0
# License: GPL2/BSD

"""
build operation
"""

__all__ = ('build_base', 'install', 'uninstall', 'replace', 'fetch_base',
    'empty_build_op', 'FailedDirectory', 'GenericBuildError', 'errors')

from pkgcore import operations as _operations_mod
from snakeoil.dependant_methods import ForcedDepends
from snakeoil import klass

from snakeoil import demandload
demandload.demandload(globals(),
    'pkgcore:fetch@_fetch_module',
    'snakeoil.lists:iflatten_instance',
)


class fetch_base(object):

    def __init__(self, domain, pkg, fetchables, fetcher):
        """Record the state needed to fetch a package's files.

        :param domain: domain the fetch occurs within
        :param pkg: package whose files are being fetched
        :param fetchables: iterable of fetchable objects to retrieve
        :param fetcher: fetcher implementation doing the actual work
        """
        # verification results, filled in as fetches complete
        self.verified_files = {}
        # basenames encountered so far
        self._basenames = set()
        self.domain = domain
        self.pkg = pkg
        self.fetchables = fetchables
        self.fetcher = fetcher

    def fetch_all(self, observer):
        for fetchable in self.fetchables:
Example #22
0
# Copyright: 2011 Brian Harring <*****@*****.**>
# License: GPL2/BSD 3 clause

import threading
import Queue
import time
from snakeoil import compatibility
from snakeoil.demandload import demandload
demandload(
    globals(),
    'snakeoil.process:get_proc_count',
)


def reclaim_threads(threads):
    """Join the given threads, best-effort.

    Exceptions in compatibility.IGNORED_EXCEPTIONS (e.g. SystemExit,
    KeyboardInterrupt) are re-raised; any other failure from a join is
    deliberately swallowed so the remaining threads still get joined.

    :param threads: iterable of threading.Thread instances
    """
    for x in threads:
        try:
            x.join()
        except compatibility.IGNORED_EXCEPTIONS:
            raise
        except Exception:
            # fix: drop the unused "as e" binding; intentionally
            # best-effort, though logging here would be better
            pass


def map_async(iterable, functor, *args, **kwds):
    per_thread_args = kwds.pop("per_thread_args", lambda: ())
    per_thread_kwds = kwds.pop("per_thread_kwds", lambda: {})
    parallelism = kwds.pop("threads", None)
    if parallelism is None:
        parallelism = get_proc_count()
Example #23
0
Changing them triggering regen of other attributes on the package instance.
"""

__all__ = ("make_wrapper",)

from operator import attrgetter

from snakeoil.containers import LimitedChangeSet, Unchangable
from snakeoil.currying import partial

from pkgcore.package.base import wrapper

from snakeoil.demandload import demandload
demandload(globals(),
    "copy",
)


def _getattr_wrapped(attr, self):
    """Return attr of the raw pkg passed through its wrapper, with caching.

    Cache entries are tagged with self._reuse_pt; a changed reuse point
    invalidates them and forces recomputation.
    """
    o = self._cached_wrapped.get(attr)
    if o is None or o[0] != self._reuse_pt:
        o = self._wrapped_attr[attr](getattr(self._raw_pkg, attr),
                                     self._configurable)
        # store (generation, value); callers get the value component
        o = self._cached_wrapped[attr] = (self._reuse_pt, o)
    return o[1]


def make_wrapper(configurable_attribute_name, attributes_to_wrap=(),
                 kls_injections={}):
    """
Example #24
0
from snakeoil.sequences import iflatten_instance, stable_unique
from snakeoil.strings import pluralism

from pkgcore.ebuild import resolver, restricts
from pkgcore.ebuild.atom import atom
from pkgcore.ebuild.misc import run_sanity_checks
from pkgcore.merge import errors as merge_errors
from pkgcore.operations import observer, format
from pkgcore.repository.util import get_raw_repos
from pkgcore.resolver.util import reduce_to_failures
from pkgcore.restrictions import packages
from pkgcore.restrictions.boolean import OrRestriction
from pkgcore.util import commandline, parserestrict

demandload(
    'textwrap:dedent',
    'pkgcore.repository.virtual:RestrictionRepo',
)

argparser = commandline.ArgumentParser(domain=True,
                                       description=__doc__,
                                       script=(__file__, __name__))
argparser.add_argument(nargs='*',
                       dest='targets',
                       metavar='TARGET',
                       action=commandline.StoreTarget,
                       use_sets='sets',
                       help="extended package matching",
                       docs=commandline.StoreTarget.__doc__.split('\n')[1:])

operation_args = argparser.add_argument_group('operations')
operation_options = operation_args.add_mutually_exclusive_group()
Example #25
0
# Copyright: 2011-2012 Brian Harring <*****@*****.**>
# License: GPL2/BSD 3 clause

import os
import sys

from snakeoil.demandload import demandload
demandload(
    globals(),
    'subprocess',
    'snakeoil.osutils:access',
    'snakeoil.fileutils:readlines_ascii',
)

def _parse_cpuinfo():
    """Parse /proc/cpuinfo into a list of per-processor field dicts.

    Processor stanzas are separated by blank lines; each "key : value"
    line becomes a stripped key/value pair in that processor's dict.
    A trailing stanza not followed by a blank line is dropped, matching
    the original behavior.
    """
    procs = []
    current = []
    for line in readlines_ascii("/proc/cpuinfo", True, True, False):
        if line:
            current.append(line.split(":", 1))
        elif current:
            procs.append(current)
            current = []
    return [{key.strip(): val.strip() for key, val in stanza}
            for stanza in procs]

def _get_linux_physical_proc_count():
    procs = _parse_cpuinfo()
    if not procs:
        return _get_linux_proc_count()
Example #26
0
# Copyright: 2006 Brian Harring <*****@*****.**>
# License: BSD/GPL2

import codecs
from collections import deque
import os
import stat

from pkgcore.ebuild.atom import MalformedAtom, atom
from snakeoil.demandload import demandload
from snakeoil.osutils import listdir, pjoin, sizeof_fmt
from snakeoil.strings import pluralism

from pkgcheck.base import Error, Warning, Template, package_feed

demandload('errno')

# Characters permitted in distfile names, in character-class form (used
# for human-readable messages).
allowed_filename_chars = "a-zA-Z0-9._-+:"
# Same set expanded for per-character membership tests.
# NOTE(review): xrange is Python 2 only; this module predates py3 support.
allowed_filename_chars_set = set()
allowed_filename_chars_set.update(
    chr(x) for x in xrange(ord('a'),
                           ord('z') + 1))
allowed_filename_chars_set.update(
    chr(x) for x in xrange(ord('A'),
                           ord('Z') + 1))
allowed_filename_chars_set.update(
    chr(x) for x in xrange(ord('0'),
                           ord('9') + 1))
allowed_filename_chars_set.update([".", "-", "_", "+", ":"])

Example #27
0
from collections import OrderedDict
from itertools import chain
import re
import subprocess
import sys

from snakeoil.demandload import demandload
from snakeoil.strings import pluralism

from . import Cli, login_required
from ..exceptions import BiteError
from ..utils import block_edit, get_input, launch_browser

demandload(
    'pprint',
    'urllib.parse:parse_qs',
    'bite:const',
)


class Bugzilla(Cli):
    """CLI for Bugzilla service."""

    _service = 'bugzilla'

    def attach(self, *args, **kw):
        """Prompt for an attachment description when none was given, then delegate."""
        if kw['comment'] is None:
            # interactive: open the user's editor for an optional description
            kw['comment'] = block_edit('Enter optional long description of attachment')
        super().attach(*args, **kw)

    def create(self, *args, **kw):
Example #28
0
# License: BSD/GPL2
"""value restrictions

Works hand in hand with :obj:`pkgcore.restrictions.packages`, these
classes match against a value handed in, package restrictions pull the
attr from a package instance and hand it to their wrapped restriction
(which is a value restriction).
"""

from snakeoil.klass import generic_equality, reflective_hash
from snakeoil.demandload import demandload

from pkgcore.restrictions import restriction, boolean, packages

demandload(
    're',
    'snakeoil.sequences:iflatten_instance',
)

# Backwards compatibility.
value_type = restriction.value_type

try:
    from pkgcore.restrictions import _restrictions as extension
except ImportError:
    extension = None


class base(restriction.base):
    """Base restriction matching object for values.

    Beware: do not check for instances of this to detect value
Example #29
0
from copy import deepcopy
from operator import attrgetter
from urllib.parse import urlencode

from snakeoil.demandload import demandload

from . import Cli

demandload('bite:const')


class Monorail(Cli):
    """CLI for Monorail service."""

    _service = 'monorail'

    def _search_params(self, **kw):
        query = {}
        query_list = []
        options_log = []

        for k, v in ((k, v) for (k, v) in kw.items() if v):
            if k == 'query':
                query_list.append(v)
                options_log.append('  advanced query: {}'.format(v))
            if k == 'attachment':
                query_list.append('attachment:{}'.format(v))
                options_log.append('  attachment: {}'.format(v))
            if k == 'blocked':
                query_list.append('is:blocked')
                options_log.append('  blocked: yes')
Example #30
0
# Copyright: 2007 Markus Ullmann <*****@*****.**>
# License: BSD/GPL2
"""check for some bad coding styles like insinto's, old variables etc"""

from snakeoil.demandload import demandload

from pkgcheck import base

demandload("re")


class BadInsIntoDir(base.Warning):
    """ebuild uses insinto where compact commands exist"""

    threshold = base.versioned_feed

    __slots__ = ("category", "package", "version", "line", "insintodir")

    def __init__(self, pkg, insintodir, line):
        """Record the offending pkg (as cpv attrs), directory, and line."""
        super(BadInsIntoDir, self).__init__()
        self._store_cpv(pkg)
        self.insintodir = insintodir
        self.line = line

    @property
    def short_desc(self):
        return "ebuild uses insinto %s on line %s" % (
            self.insintodir, self.line)


class BadInsIntoCheck(base.Template):
Example #31
0
# Rationale is the former should be a PYTHONPATH issue while the
# latter an installed plugin issue. May have to change this if it
# causes problems.

from collections import defaultdict
from importlib import import_module
import operator
import os.path

from snakeoil import compatibility, mappings, modules, sequences
from snakeoil.demandload import demandload
from snakeoil.osutils import pjoin, listdir_files

from pkgcore import plugins

demandload("errno", "tempfile", "snakeoil:fileutils,osutils", "pkgcore.log:logger")


_plugin_data = sequences.namedtuple("_plugin_data", ["key", "priority", "source", "target"])

PLUGIN_ATTR = "pkgcore_plugins"

CACHE_HEADER = "pkgcore plugin cache v3"
CACHE_FILENAME = "plugincache"


def _clean_old_caches(path):
    for name in ("plugincache2",):
        try:
            osutils.unlink_if_exists(pjoin(path, name))
        except EnvironmentError as e:
Example #32
0
__all__ = ("install", "uninstall", "replace", "operations")

import os
import shutil

from snakeoil import compression
from snakeoil.demandload import demandload
from snakeoil.osutils import ensure_dirs, pjoin, normpath

from pkgcore.const import VERSION
from pkgcore.operations import repo as repo_ops

demandload(
    'time',
    'snakeoil.data_source:local_source',
    'pkgcore.ebuild:conditionals',
    'pkgcore.log:logger',
    'pkgcore.vdb.contents:ContentsFile',
)


def update_mtime(path, timestamp=None):
    """Set atime/mtime on path; failures are logged, not raised.

    :param path: filesystem path to touch
    :param timestamp: seconds since the epoch; None means the current time
    """
    if timestamp is None:
        timestamp = time.time()
    logger.debug("updating vdb timestamp for %r", path)
    try:
        os.utime(path, (timestamp, timestamp))
    except EnvironmentError as e:
        # message grammar fixed: was "failed updated"
        logger.error("failed updating vdb timestamp for %r: %s", path, e)

Example #33
0
from snakeoil import compatibility, klass, mappings
from snakeoil.caching import WeakInstMeta
from snakeoil.currying import post_curry
from snakeoil.demandload import demandload
from snakeoil.osutils import pjoin, listdir_files, listdir
from snakeoil.sequences import namedtuple

from pkgcore.config import ConfigHint
from pkgcore.repository import syncable

demandload(
    'errno',
    'snakeoil.bash:BashParseError,iter_read_bash,read_dict',
    'snakeoil.fileutils:readfile,readlines_ascii',
    'snakeoil.sequences:iter_stable_unique',
    'snakeoil.xml:etree',
    'pkgcore.ebuild:atom,profiles',
    'pkgcore.log:logger',
    "pkgcore.restrictions:packages",
)


class Maintainer(object):
    """Data on a single maintainer.

    At least one of email and name is not C{None}.

    :type email: C{unicode} object or C{None}
    :ivar email: email address.
    :type name: C{unicode} object or C{None}
    :ivar name: full name
Example #34
0
# License: GPL2/BSD
"""
repository modifications (installing, removing, replacing)
"""

__all__ = ("Failure", "base", "install", "uninstall", "replace")

from snakeoil.demandload import demandload
from snakeoil.dependant_methods import ForcedDepends
from snakeoil.weakrefs import WeakRefFinalizer

demandload(
    'errno',
    "shutil",
    "tempfile",
    'snakeoil:osutils',
    "pkgcore.log:logger",
    "pkgcore.merge:errors@merge_errors",
    "pkgcore.merge.engine:MergeEngine",
    "pkgcore.package.mutated:MutatedPkg",
)


class fake_lock(object):
    """No-op lock: all acquire/release methods alias the empty __init__,
    so every lock operation succeeds immediately and returns None."""
    def __init__(self):
        pass

    acquire_write_lock = acquire_read_lock = __init__
    release_read_lock = release_write_lock = __init__


class finalizer_base(WeakRefFinalizer, ForcedDepends):
Example #35
0
# Copyright: 2006-2011 Brian Harring <*****@*****.**>
# License: GPL2/BSD

"""
resolver configuration to match portage behaviour (misbehaviour in a few spots)
"""

__all__ = ["upgrade_resolver", "min_install_resolver"]

from pkgcore.repository import misc, multiplex
from pkgcore.resolver import plan

from snakeoil.demandload import demandload
demandload(globals(),
    'pkgcore.restrictions:packages,values',
)

def upgrade_resolver(vdbs, dbs, verify_vdb=True, nodeps=False,
                     force_replacement=False,
                     resolver_cls=plan.merge_plan, **kwds):

    """
    generate and configure a resolver for upgrading all processed nodes.

    :param vdbs: list of :obj:`pkgcore.repository.prototype.tree` instances
        that represents the livefs
    :param dbs: list of :obj:`pkgcore.repository.prototype.tree` instances
        representing sources of pkgs
    :param verify_vdb: should we stop resolving once we hit the vdb,
        or do full resolution?
    :param force_vdb_virtuals: old style portage virtuals (non metapkgs)
Example #36
0
import os.path
import logging

from pkgcore.config import load_config, errors
from snakeoil import formatters, demandload, currying, modules
from snakeoil import compatibility
import optparse
from pkgcore.util import argparse
from pkgcore.util.commandline_optparse import *

demandload.demandload(globals(),
    'copy@_copy',
    'snakeoil:osutils',
    'snakeoil.errors:walk_exception_chain',
    'snakeoil.lists:iflatten_instance',
    'pkgcore:version@_version',
    'pkgcore.config:basics',
    'pkgcore.restrictions:packages,restriction',
    'pkgcore.util:parserestrict',
    'pkgcore:operations',
    'traceback',
)


class FormattingHandler(logging.Handler):

    """Logging handler printing through a formatter."""

    def __init__(self, formatter):
        """Store the snakeoil formatter used for log output.

        :param formatter: snakeoil formatter instance records are written through
        """
        logging.Handler.__init__(self)
        # "formatter" clashes with a Handler attribute.
        self.out = formatter
Example #37
0
# Copyright: 2011 Brian Harring <*****@*****.**>
# License: GPL2/BSD 3 clause

from operator import itemgetter
from collections import deque, defaultdict
from snakeoil.osutils import listdir_files, pjoin
from snakeoil.fileutils import readlines
from snakeoil.iterables import chain_from_iterable
from pkgcore.ebuild.atom import atom
from snakeoil.lists import iflatten_instance
from snakeoil import demandload
demandload.demandload(globals(),
    'pkgcore.log:logger',
)

demandload.demand_compile_regexp(globals(), "valid_updates_re", "^(\d)Q-(\d{4})$")


def _scan_directory(path):
    """Return update filenames from path, sorted by (year, quarter).

    Only files matching valid_updates_re ("<quarter>Q-<year>") are kept.
    """
    matched = []
    for name in listdir_files(path):
        m = valid_updates_re.match(name)
        if m is not None:
            # sort key: (year, quarter)
            matched.append(((m.group(2), m.group(1)), name))
    matched.sort(key=itemgetter(0))
    return [name for _, name in matched]

def read_updates(path):
    def f():
        d = deque()
        return [d,d]
Example #38
0
# License: GPL2/BSD
"""
build operation
"""

__all__ = ('build_base', 'install', 'uninstall', 'replace', 'fetch_base',
           'empty_build_op', 'FailedDirectory', 'GenericBuildError', 'errors')

from pkgcore import operations as _operations_mod
from snakeoil.dependant_methods import ForcedDepends
from snakeoil import klass

from snakeoil import demandload
demandload.demandload(
    globals(),
    'pkgcore:fetch@_fetch_module',
    'snakeoil.lists:iflatten_instance',
)


class fetch_base(object):
    """Base helper driving fetch operations for a package's fetchables."""

    def __init__(self, domain, pkg, fetchables, fetcher):
        """
        :param domain: domain the fetch is performed within
        :param pkg: package whose files are being fetched
        :param fetchables: sequence of fetchable objects to retrieve
        :param fetcher: fetcher used to retrieve them
        """
        # files fetched/verified so far — populated by fetch_all (TODO confirm)
        self.verified_files = {}
        # basenames seen during fetching; used for dedup/cleanup — verify against callers
        self._basenames = set()
        self.domain = domain
        self.pkg = pkg
        self.fetchables = fetchables
        self.fetcher = fetcher

    def fetch_all(self, observer):
        for fetchable in self.fetchables:
Example #39
0
from snakeoil.weakrefs import WeakValCache

from pkgcore.config import ConfigHint, configurable
from pkgcore.ebuild import ebuild_src
from pkgcore.ebuild import eclass_cache as eclass_cache_module
from pkgcore.operations import repo as _repo_ops
from pkgcore.repository import prototype, errors, configured

demandload(
    'errno',
    'operator:attrgetter',
    'random:shuffle',
    'snakeoil.chksum:get_chksums',
    'snakeoil.data_source:local_source',
    'snakeoil.sequences:iflatten_instance',
    'pkgcore:fetch',
    'pkgcore.ebuild:cpv,digest,ebd,repo_objs,atom,restricts,profiles,processor',
    'pkgcore.ebuild:errors@ebuild_errors',
    'pkgcore.fs.livefs:sorted_scan',
    'pkgcore.log:logger',
    'pkgcore.package:errors@pkg_errors',
    'pkgcore.restrictions:packages',
    'pkgcore.util.packages:groupby_pkg',
)


class repo_operations(_repo_ops.operations):

    def _cmd_implementation_digests(self, domain, matches, observer,
                                    mirrors=False, force=False):
        manifest_config = self.repo.config.manifests
        if manifest_config.disabled:
Example #40
0
    "ExternalSyncer",
    "dvcs_syncer",
    "GenericSyncer",
    "DisabledSyncer",
    "AutodetectSyncer",
)

from snakeoil import compatibility
from snakeoil.demandload import demandload

from pkgcore.config import ConfigHint, configurable

demandload(
    'os',
    'pwd',
    'stat',
    'errno',
    'snakeoil:process',
    'pkgcore:os_data,plugin,spawn',
)


class syncer_exception(Exception):
    """Base class for all syncer errors."""


class uri_exception(syncer_exception):
    """Syncer error relating to a URI."""


class generic_exception(syncer_exception):
    """Generic, unclassified syncer failure."""
Example #41
0
    ChunkedDataDict, chunked_data, collapsed_restrict_to_data,
    incremental_expansion, incremental_expansion_license,
    non_incremental_collapsed_restrict_to_data, optimize_incrementals,
    package_keywords_splitter)
from pkgcore.ebuild.repo_objs import OverlayedLicenses
from pkgcore.repository import visibility
from pkgcore.restrictions import packages, values
from pkgcore.restrictions.delegated import delegate
from pkgcore.util.parserestrict import parse_match

demandload(
    'collections:defaultdict',
    'errno',
    'multiprocessing:cpu_count',
    'operator:itemgetter',
    're',
    'pkgcore.binpkg:repository@binary_repo',
    'pkgcore.ebuild:repository@ebuild_repo',
    'pkgcore.ebuild.triggers:generate_triggers@ebuild_generate_triggers',
    'pkgcore.fs.livefs:iter_scan',
    "pkgcore.repository:util",
)


class MissingFile(BaseError):
    """Required file is missing."""

    def __init__(self, filename, setting):
        """
        :param filename: path the setting points at but which does not exist
        :param setting: name of the configuration setting involved
        """
        message = "setting %s points at %s, which doesn't exist." % (
            setting, filename)
        BaseError.__init__(self, message)
        self.file = filename
        self.setting = setting
Example #42
0
import pkgcore
from pkgcore import const, os_data
from pkgcore.ebuild import const as e_const

from snakeoil import klass
from snakeoil.currying import pretty_docs
from snakeoil.demandload import demandload
from snakeoil.osutils import abspath, normpath, pjoin
from snakeoil.weakrefs import WeakRefFinalizer

demandload(
    'logging',
    'itertools:chain',
    'traceback',
    'snakeoil:fileutils',
    'snakeoil:process',
    'snakeoil.process:spawn',
    'pkgcore.log:logger',
)


def _single_thread_allowed(functor):
    def _inner(*args, **kwds):
        _acquire_global_ebp_lock()
        try:
            return functor(*args, **kwds)
        finally:
            _release_global_ebp_lock()

    _inner.func = functor
Example #43
0
from snakeoil.compatibility import raise_from
from snakeoil.mappings import DictMixin, StackedDict
from snakeoil.osutils import listdir_dirs, listdir_files, access
from snakeoil.osutils import pjoin
from snakeoil.klass import jit_attr, jit_attr_named, alias_attr

from snakeoil.demandload import demandload
demandload(globals(),
    "snakeoil:chksum",
    "snakeoil.data_source:local_source,data_source",
    "pkgcore.merge:engine",
    "pkgcore.fs.livefs:scan",
    "pkgcore.fs.contents:offset_rewriter,contentsSet",
    "pkgcore.repository:wrapper",
    "pkgcore.package:base@pkg_base",
    "pkgcore.ebuild:ebd",
    "errno",
    "pkgcore.fs.tar:generate_contents",
    "pkgcore.binpkg.xpak:Xpak",
    "snakeoil:compression",
    'pkgcore.binpkg:remote',
)


class force_unpacking(triggers.base):

    required_csets = ('new_cset',)
    priority = 5
    _hooks = ('sanity_check',)
    _label = 'forced decompression'
Example #44
0
configuration subsystem
"""

__all__ = ("ConfigHint", "configurable", "load_config")

# keep these imports as minimal as possible; access to
# pkgcore.config isn't uncommon, thus don't trigger till
# actually needed

from snakeoil.demandload import demandload

from pkgcore import const

demandload(
    'os',
    'pkgcore.config:central,cparser',
    'pkgcore.ebuild.portage_conf:PortageConfig',
    'pkgcore.log:logger',
)


class ConfigHint(object):
    """hint for introspection supplying overrides"""

    # be aware this is used in clone
    __slots__ = ("types", "positional", "required", "typename",
                 "allow_unknowns", "doc", "authorative", 'requires_config')

    def __init__(self,
                 types=None,
                 positional=None,
                 required=None,
Example #45
0
we caught the exception.
"""

__all__ = (
    "base", "bz2_source", "data_source", "local_source", "text_data_source",
    "bytes_data_source", "invokable_data_source",
)

import errno

from snakeoil.currying import post_curry, partial
from snakeoil import compatibility, stringio, klass
from snakeoil.demandload import demandload
demandload(
    globals(),
    'codecs',
    'snakeoil:compression,fileutils',
)


def _mk_writable_cls(base, name):
    """
    inline mixin of writable overrides

    while a normal mixin is preferable, this is required due to
    differing slot layouts between py2k/py3k base classes of
    stringio.
    """

    class kls(base):
        __doc__ = """
Example #46
0
"""

__all__ = ("ConfigHint", "configurable", "load_config")

# keep these imports as minimal as possible; access to
# pkgcore.config isn't uncommon, thus don't trigger till
# actually needed

from snakeoil.demandload import demandload

from pkgcore import const

demandload(
    'os',
    'pkgcore.config:central,cparser',
    'pkgcore.ebuild.portage_conf:config_from_make_conf',
    'pkgcore.log:logger',
    'pkgcore.plugin:get_plugins',
)


class ConfigHint(object):
    """hint for introspection supplying overrides"""

    # be aware this is used in clone
    __slots__ = ("types", "positional", "required", "typename",
                 "allow_unknowns", "doc", "authorative", 'requires_config')

    def __init__(self,
                 types=None,
                 positional=None,
Example #47
0
__all__ = (
    "ConfigType", "LazySectionRef", "LazyNamedSectionRef", "ConfigSection",
    "DictConfigSection", "FakeIncrementalDictConfigSection", "convert_string",
    "convert_asis", "convert_hybrid", "section_alias", "str_to_list",
    "str_to_str", "str_to_bool", "str_to_int", "parse_config_file",
)

from functools import partial

from snakeoil import compatibility
from snakeoil.demandload import demandload

from pkgcore.config import errors, configurable

demandload("snakeoil:modules")

type_names = ("list", "str", "bool", "int")


# Copied from inspect.py which copied it from compile.h.
# Also documented in http://docs.python.org/ref/types.html.
CO_VARARGS, CO_VARKEYWORDS = 4, 8

if compatibility.is_py3k:
    _code_attrname = '__code__'
else:
    _code_attrname = 'func_code'


class ConfigType(object):
Example #48
0
# Copyright: 2011 Brian Harring <*****@*****.**>
# License: GPL2/BSD 3 clause

import threading
import queue

from snakeoil.compatibility import IGNORED_EXCEPTIONS
from snakeoil.demandload import demandload

demandload('multiprocessing:cpu_count', )


def reclaim_threads(threads):
    """Join every thread in *threads*, best-effort.

    Critical exceptions (those in IGNORED_EXCEPTIONS, e.g. KeyboardInterrupt/
    SystemExit) propagate immediately; any other failure from a single join
    is swallowed so the remaining threads still get reclaimed.
    """
    for thread in threads:
        try:
            thread.join()
        except IGNORED_EXCEPTIONS:
            raise
        except Exception:
            # TODO: log the failure instead of silently dropping it;
            # deliberately best-effort so remaining threads are joined.
            pass


def map_async(iterable, functor, *args, **kwds):
    per_thread_args = kwds.pop("per_thread_args", lambda: ())
    per_thread_kwds = kwds.pop("per_thread_kwds", lambda: {})
    parallelism = kwds.pop("threads", None)
    if parallelism is None:
        parallelism = cpu_count()

    if hasattr(iterable, '__len__'):
Example #49
0
# Copyright: 2006-2011 Brian Harring <*****@*****.**>
# License: BSD/GPL2

from snakeoil.cli import arghparse
from snakeoil.demandload import demandload

from pkgcore.util import commandline

demandload(
    'os',
    'functools:partial',
    'snakeoil:osutils',
    'pkgcore.ebuild:atom,conditionals',
    'pkgcore.ebuild.eapi:get_eapi',
    'pkgcore.restrictions.boolean:AndRestriction',
    'pkgcore.util:packages',
)

def str_pkg(pkg):
    """Return a human-readable version string for *pkg*."""
    raw = packages.get_raw_pkg(pkg)
    # special casing; old style virtuals come through as the original pkg.
    if raw.package_is_real:
        return raw.cpvstr
    if hasattr(raw, "actual_pkg"):
        return raw.actual_pkg.cpvstr
    # icky, but works: strip the leading "=" off the versioned rdepend atom.
    return str(raw.rdepend).lstrip("=")


def get_atom_kls(value):
    eapi = get_eapi(value)
Example #50
0
feed type) since this might change in the future. Scopes are integers,
but do not rely on that either.

Feed types have to match exactly. Scopes are ordered: they define a
minimally accepted scope, and for transforms the output scope is
identical to the input scope.
"""

from operator import attrgetter

from pkgcore.config import ConfigHint
from snakeoil.demandload import demandload

demandload(
    'itertools:chain',
    'logging',
    're',
)

repository_feed = "repo"
category_feed = "cat"
package_feed = "cat/pkg"
versioned_feed = "cat/pkg-ver"
ebuild_feed = "cat/pkg-ver+text"

# The plugger needs to be able to compare those and know the highest one.
version_scope, package_scope, category_scope, repository_scope = range(4)
max_scope = repository_scope


class Addon(object):
Example #51
0
import copy
import logging
import optparse
import os.path
import sys

from snakeoil import klass
from snakeoil.demandload import demandload

from pkgcore.config import load_config

demandload(
    'snakeoil.bash:iter_read_bash',
    'snakeoil:version',
    'pkgcore.config:basics',
    'pkgcore.ebuild:atom',
    'pkgcore.util:parserestrict',
)


CONFIG_LOADED_MSG = (
    'Configuration already loaded. If moving the option earlier '
    'on the commandline does not fix this report it as a bug.')


# Mix in object here or properties do not work (Values is an oldstyle class).
class Values(optparse.Values, object):

    """Values with an autoloaded config property.
Example #52
0
# Copyright: 2011 Brian Harring <*****@*****.**>
# License: GPL2/BSD 3 clause

from snakeoil.demandload import demandload

from pkgcore.util import commandline

demandload(
    'collections:defaultdict',
    'itertools:chain',
    'operator',
    'pkgcore.ebuild:atom,profiles',
)

commands = []
# changelog, once a changelog parser is available
# desc: possibly
# info: desc, keywords known, known profiles (possibly putting it elsewhere)
# global known flags, etc


def profile(value):
    """argparse type converter: existing path -> ProfileStack."""
    path = commandline.existent_path(value)
    return profiles.ProfileStack(path)


class _base(commandline.ArgparseCommand):
    """Shared base for subcommands that operate on a profile path."""

    def bind_to_parser(self, parser):
        # Standard ArgparseCommand bindings first, then the common
        # "profile" positional, converted via profile() to a ProfileStack.
        commandline.ArgparseCommand.bind_to_parser(self, parser)
        parser.add_argument("profile",
                            help="path to the profile to inspect",
                            type=profile)
Example #53
0
from snakeoil import compatibility, data_source, klass
from snakeoil.demandload import demandload
from snakeoil.fileutils import readfile
from snakeoil.mappings import IndeterminantDict
from snakeoil.osutils import listdir_dirs, pjoin

from pkgcore.config import ConfigHint
from pkgcore.ebuild import ebuild_built
from pkgcore.ebuild.cpv import versioned_CPV
from pkgcore.ebuild.errors import InvalidCPV
from pkgcore.repository import errors, multiplex, prototype

demandload(
    'pkgcore.log:logger',
    'pkgcore.vdb:repo_ops',
    'pkgcore.vdb.contents:ContentsFile',
)


class tree(prototype.tree):
    livefs = True
    configured = False
    configurables = ("domain", "settings")
    configure = None
    package_factory = staticmethod(ebuild_built.generate_new_factory)
    operations_kls = repo_ops.operations

    pkgcore_config_type = ConfigHint(
        {'location': 'str',
         'cache_location': 'str', 'repo_id': 'str',
Example #54
0
__all__ = ("GlsaDirSet", "SecurityUpgrades")

import os

from pkgcore.restrictions import packages, restriction, values
from pkgcore.config import ConfigHint

from snakeoil.osutils import listdir_files, pjoin
from snakeoil.klass import generic_equality
from snakeoil.iterables import caching_iter
from snakeoil.demandload import demandload
demandload(
    globals(),
    'pkgcore.package:mutated',
    'pkgcore.ebuild:cpv,atom,restricts@atom_restricts',
    'pkgcore.log:logger',
    'pkgcore.util.repo_utils:get_virtual_repos',
    'snakeoil.xml:etree',
)


class GlsaDirSet(object):
    """
    generate a pkgset based on GLSA's distributed via a directory.

    (rsync tree is the usual source.)
    """

    pkgcore_config_type = ConfigHint({'src': 'ref:repo'}, typename='pkgset')
    op_translate = {"ge": ">=", "gt": ">", "lt": "<", "le": "<=", "eq": "="}
Example #55
0
import errno
import os

from snakeoil.bash import read_bash_dict
from snakeoil.demandload import demandload
from snakeoil.fileutils import AtomicWriteFile
from snakeoil.lists import stable_unique, iflatten_instance
from snakeoil.osutils import listdir_files, normpath, pjoin

from pkgcore.merge import triggers, const, errors
from pkgcore.fs import livefs
from pkgcore.restrictions import values

demandload(
    'fnmatch',
    'snakeoil:compatibility',
    'pkgcore:os_data',
)

colon_parsed = frozenset([
    "ADA_INCLUDE_PATH",  "ADA_OBJECTS_PATH", "INFODIR", "INFOPATH",
    "LDPATH", "MANPATH", "PATH", "PRELINK_PATH", "PRELINK_PATH_MASK",
    "PYTHONPATH", "PKG_CONFIG_PATH", "ROOTPATH"
])

incrementals = frozenset([
    'ADA_INCLUDE_PATH', 'ADA_OBJECTS_PATH', 'CLASSPATH', 'CONFIG_PROTECT',
    'CONFIG_PROTECT_MASK', 'INFODIR', 'INFOPATH', 'KDEDIRS', 'LDPATH',
    'MANPATH', 'PATH', 'PRELINK_PATH', 'PRELINK_PATH_MASK', 'PYTHONPATH',
    'ROOTPATH', 'PKG_CONFIG_PATH'
])
Example #56
0
"""

__all__ = ("sync", "sync_main", "copy", "copy_main", "regen", "regen_main",
           "perl_rebuild", "perl_rebuild_main", "env_update",
           "env_update_main")

from pkgcore.util import commandline
from snakeoil.demandload import demandload
demandload(
    globals(),
    'os',
    'errno',
    'time',
    'snakeoil.osutils:pjoin,listdir_dirs',
    'pkgcore:spawn',
    'pkgcore.operations:observer',
    'pkgcore.repository:multiplex',
    'pkgcore.package:mutated',
    'pkgcore.fs:contents,livefs',
    'pkgcore.ebuild:processor,triggers',
    'pkgcore.merge:triggers@merge_triggers',
    'pkgcore.sync:base@sync_base',
    're',
)


def format_seq(seq, formatter=repr):
    if not seq:
        seq = None
    elif len(seq) == 1:
        seq = seq[0]
    else:
Example #57
0
    "static_attrgetter", "instance_attrgetter", "jit_attr", "jit_attr_none",
    "jit_attr_named", "jit_attr_ext_method", "alias_attr", "cached_hash",
    "cached_property", "cached_property_named",
    "steal_docs", "immutable_instance", "inject_immutable_instance",
    "alias_method", "patch",
)

from collections import deque
from functools import partial, wraps
from operator import attrgetter

from snakeoil import caching, compatibility
from snakeoil.currying import post_curry
from snakeoil.demandload import demandload

demandload('inspect')


def native_GetAttrProxy(target):
    """Build a __getattr__ that forwards lookups to the attribute named *target*."""
    def reflected_getattr(self, attr):
        # object.__getattribute__ avoids recursing back through this proxy.
        proxied = object.__getattribute__(self, target)
        return getattr(proxied, attr)
    return reflected_getattr


def native_contains(self, key):
    """
    return True if key is in self, False otherwise
    """
    try:
        # pylint: disable=pointless-statement
        self[key]
Example #58
0
# Copyright: 2011 Brian Harring <*****@*****.**>
# License: GPL2/BSD 3 clause

import time
from snakeoil import compatibility
from snakeoil.demandload import demandload
demandload(
    globals(),
    'pkgcore.util.thread_pool:map_async',
)


def regen_iter(iterable, regen_func, observer, is_thread=False):
    """Apply *regen_func* to each item, reporting per-item failures to *observer*.

    KeyboardInterrupt stops iteration quietly; other critical exceptions
    (IGNORED_EXCEPTIONS) propagate. Any remaining failure is reported via
    observer.error and iteration continues.
    """
    for item in iterable:
        try:
            regen_func(item)
        except compatibility.IGNORED_EXCEPTIONS as exc:
            if isinstance(exc, KeyboardInterrupt):
                return
            raise
        except Exception as exc:
            observer.error("caught exception %s while processing %s" % (exc, item))


def regen_repository(repo,
                     observer,
                     threads=1,
                     pkg_attr='keywords',
                     **options):

    helpers = []
Example #59
0
# Copyright: 2005-2012 Brian Harring <*****@*****.**>
# License: GPL2/BSD

"""
restriction classes designed for package level matching
"""

from snakeoil.compatibility import is_py3k, IGNORED_EXCEPTIONS
from snakeoil.demandload import demandload
from snakeoil.klass import generic_equality, static_attrgetter

from pkgcore.restrictions import restriction, boolean

demandload("pkgcore.log:logger")

# Backwards compatibility.
package_type = restriction.package_type


class native_PackageRestriction(object):

    __slots__ = ('_pull_attr_func', '_attr_split', 'restriction', 'ignore_missing', 'negate')
    __attr_comparison__ = ("__class__", "negate", "_attr_split", "restriction")
    __metaclass__ = generic_equality

    def __init__(self, attr, childrestriction, negate=False, ignore_missing=True):
        """
        :param attr: package attribute to match against
        :param childrestriction: a :obj:`pkgcore.restrictions.values.base` instance
        to pass attr to for matching
        :param negate: should the results be negated?
Example #60
0
# License: GPL2/BSD

__all__ = (
    "rsync_syncer",
    "rsync_timestamp_syncer",
)

from snakeoil.demandload import demandload

from pkgcore.config import ConfigHint
from pkgcore.sync import base

demandload(
    'os',
    'socket',
    'tempfile',
    'time',
    'snakeoil.osutils:pjoin',
)


class rsync_syncer(base.ExternalSyncer):

    default_excludes = ['/distfiles', '/local', '/packages']
    default_includes = []
    default_conn_timeout = 15
    default_opts = [
        '--recursive',
        '--delete',
        '--delete-delay',
        '--perms',