Example #1
    def finalize_options(self):
        # log file activation and config
        if self.debug == 'y':
            logging.basicConfig(format='%(asctime)s%(levelname)s:%(message)s', level=logging.DEBUG,
                                filename='wss_plugin.log')

        # load and import config file
        try:
            sys.path.append(self.pathConfig)
            self.configDict = __import__('config_file').config_info
            logging.info('Loading config_file was successful')
        except Exception as err:
            sys.exit("Can't import the config file." + err.message)

        # load proxy setting if exist
        if 'proxy' in self.configDict:
            self.proxySetting = self.configDict['proxy']
        if 'index_url' in self.configDict:
            self.pkgIndex = PackageIndex(index_url=self.configDict['index_url'])

        self.projectCoordinates = Coordinates.create_project_coordinates(self.distribution)
        self.userEnvironment = pk_res.Environment(get_python_lib(), platform=None, python=None)
        distribution_specification = self.distribution.get_name() + "==" + self.distribution.get_version()
        distribution_requirement = pk_res.Requirement.parse(distribution_specification)

        # resolve all dependencies
        try:
            self.distDepend = pk_res.working_set.resolve([distribution_requirement], env=self.userEnvironment)
            self.distDepend.pop(0)
            logging.info("Finished resolving dependencies")
        except Exception as err:
            print "distribution was not found on this system, and is required by this application", err.message
Example #2
    def __init__(self, index_url=["http://pypi.python.org/simple"], hosts=("*",), *args, **kw):
        self.index_urls = []
        if type(index_url) is list:
            BasePackageIndex.__init__(self, index_url[0], hosts, *args, **kw)
            for url in index_url:
                self.index_urls.append(url + "/"[: not url.endswith("/")])

        else:
            BasePackageIndex.__init__(self, index_url, hosts, *args, **kw)
            self.index_urls.append(index_url)
Example #3
 def fetch_requirement(req, dest_dir, force_download):
     from setuptools.package_index import PackageIndex  # @Reimport
     from pkg_resources import working_set  # @Reimport  # NOQA
     i = PackageIndex()
     if force_download:
         [i.remove(i[req.key][0]) for _ in xrange(len(i[req.key]))]
         d = i.download(req, dest_dir)
     else:
         d = i.fetch_distribution(req, dest_dir, force_scan=True)
     d = getattr(d, 'location', d) if d else ''
     return (d if d else working_set.resolve([req])[0].location)
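Example #3 above wraps the two basic retrieval calls on PackageIndex. A minimal stand-alone sketch of the fetch_distribution call it relies on, assuming a reachable index; the requirement string and temporary directory are placeholders:

import tempfile

from pkg_resources import Requirement
from setuptools.package_index import PackageIndex

# Placeholder requirement and a throwaway download directory.
req = Requirement.parse("example-package>=1.0")
dest_dir = tempfile.mkdtemp()

index = PackageIndex()
# fetch_distribution() returns a pkg_resources.Distribution or None;
# force_scan=True re-reads the index page for this project.
dist = index.fetch_distribution(req, dest_dir, force_scan=True)
if dist is not None:
    print(dist.project_name, dist.version, dist.location)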
Example #4
    def finalize_options(self):

        if platform.system() != "Windows":
            self.curl = which('curl')
            self.bash = which('bash')
            self.gcloud_url = "https://sdk.cloud.google.com"
            self.silent = "--disable-prompts"
        else:
            self.silent = "/S"
            self.gcloud_url = "https://dl.google.com/dl/cloudsdk/channels/" \
                              "rapid/GoogleCloudSDKInstaller.exe"
        self.package_index = PackageIndex()
Example #5
 def __init__(self, location, index=False, verbose=False):
     self.location = format_as_url(location)
     self.index = index
     if index:
         self.environment = PackageIndex(index_url=self.location,
                                         search_path=[])
     else:
         self.environment = PackageIndex(no_default_index=True)
         self.environment.add_find_links([self.location])
     self._projects = None
     self.tmpdir = mkdtemp(prefix="ensetuptools-")
     self.verbose = verbose
Example #6
 def initialize_options(self):
     self.debug = None
     self.proxySetting = None
     self.service = None
     self.configDict = None
     self.pathConfig = None
     self.token = None
     self.userEnvironment = None
     self.distDepend = None
     self.pkgIndex = PackageIndex()
     self.dependencyList = []
     self.projectCoordinates = None
     self.tmpdir = tempfile.mkdtemp(prefix="wss_python_plugin-")
Example #7
 def initialize_options(self):
     self.bdist_base = None
     self.dist_dir = None
     self.format = None
     self.keep_temp = False
     self.name_prefix = None
     self.package_index = PackageIndex()
     self.requirements_mapping = None
     self.selected_options = None
     self.use_pypi_deps = False
     self.use_wheel = False
     self.with_py_prefix = False
     self.initialize_manifest_options()
Example #8
 def fetch_distribution(
     self, requirement, tmpdir, force_scan=False, source=False, develop_ok=False, local_index=None
 ):
     distribute_req = pkg_resources.Requirement.parse("distribute>=0.6.14")
     if pkg_resources.get_distribution("distribute") in distribute_req:
         # The local_index parameter is only in distribute>=0.6.14
         dist = PackageIndex.fetch_distribution(
             self, requirement, tmpdir, force_scan, source, develop_ok, local_index
         )
     else:
         dist = PackageIndex.fetch_distribution(self, requirement, tmpdir, force_scan, source, develop_ok)
     if dist:
         log.info("Using %s from %s" % (dist, dist.location))
     return dist
Example #9
    def _do_upgrade(self, dist):
        # Build up a requirement for a higher bugfix release but a lower minor
        # release (so API compatibility is guaranteed)
        next_version = _next_version(dist.parsed_version)

        req = pkg_resources.Requirement.parse('{0}>{1},<{2}'.format(
            DIST_NAME, dist.version, next_version))

        package_index = PackageIndex(index_url=self.index_url)

        upgrade = package_index.obtain(req)

        if upgrade is not None:
            return self._do_download(version=upgrade.version)
Example #10
    def _do_upgrade(self, dist):
        # Build up a requirement for a higher bugfix release but a lower minor
        # release (so API compatibility is guaranteed)
        next_version = _next_version(dist.parsed_version)

        req = pkg_resources.Requirement.parse(
            '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_version))

        package_index = PackageIndex(index_url=self.index_url)

        upgrade = package_index.obtain(req)

        if upgrade is not None:
            return self._do_download(version=upgrade.version)
Example #11
def _do_upgrade(dist, index_url):
    # Build up a requirement for a higher bugfix release but a lower minor
    # release (so API compatibility is guaranteed)
    # sketchy version parsing--maybe come up with something a bit more
    # robust for this
    major, minor = (int(part) for part in dist.parsed_version[:2])
    next_minor = '.'.join([str(major), str(minor + 1), '0'])
    req = pkg_resources.Requirement.parse(
        '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_minor))

    package_index = PackageIndex(index_url=index_url)

    upgrade = package_index.obtain(req)

    if upgrade is not None:
        return _do_download(version=upgrade.version, index_url=index_url)
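Examples #9-#11 share one upgrade-check pattern: parse a version-bounded requirement and ask the index for a matching candidate with obtain(). A hedged, self-contained sketch of just that pattern; the project name, version bounds, and index URL are placeholders:

import pkg_resources
from setuptools.package_index import PackageIndex

# Placeholder: accept any bugfix release newer than 1.2.3 but below 1.3.0.
req = pkg_resources.Requirement.parse("example-dist>1.2.3,<1.3.0")

package_index = PackageIndex(index_url="https://pypi.org/simple/")
# obtain() scans the index for the requirement and returns the best
# matching Distribution, or None if nothing satisfies it.
upgrade = package_index.obtain(req)
if upgrade is not None:
    print("candidate bugfix release:", upgrade.version)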
Example #12
    def finalize_options(self):
        # log file activation and config
        if self.debug == "y":
            logging.basicConfig(
                format="%(asctime)s%(levelname)s:%(message)s", level=logging.DEBUG, filename="wss_plugin.log"
            )

        # load and import config file
        try:
            sys.path.append(self.pathConfig)
            self.configDict = __import__("config_file").config_info
            logging.info("Loading config_file was successful")
        except Exception as err:
            sys.exit("Can't import the config file." + err.message)

        # load proxy setting if exist
        if "proxy" in self.configDict:
            self.proxySetting = self.configDict["proxy"]
        if "index_url" in self.configDict:
            self.pkgIndex = PackageIndex(index_url=self.configDict["index_url"])

        self.projectCoordinates = Coordinates.create_project_coordinates(self.distribution)
        self.userEnvironment = pk_res.Environment(get_python_lib(), platform=None, python=None)
        distribution_specification = self.distribution.get_name() + "==" + self.distribution.get_version()
        distribution_requirement = pk_res.Requirement.parse(distribution_specification)

        # resolve all dependencies
        try:
            self.distDepend = pk_res.working_set.resolve([distribution_requirement], env=self.userEnvironment)
            self.distDepend.pop(0)
            logging.info("Finished resolving dependencies")
        except Exception as err:
            print "distribution was not found on this system, and is required by this application", err.message
Example #13
File: ppadd.py  Project: saxix/pyppi
 def handle_label(self, label, **options):
     with tempdir() as tmp:
         self.pypi = PackageIndex(options["index"])
         path = self.pypi.download(label, tmp)
         if path:
             self._save_package(path, **options)
         else:
             print "Could not add %s. Not found." % label
Example #14
File: utils.py  Project: openalea/PkgIt
def download_egg(eggname, dir):
    """Download an egg to a specific place
    
    :param eggname: name of egg to download
    :param dir: destination directory
    :return: local path
    """
    logger.info("Downloading %s"%eggname)
    return pi.download(eggname, dir)
Example #15
def cache_package(spec, own_url):
    try:
        spec = Requirement.parse(spec)

    except ValueError:
        raise ArgumentError(
            "Not a URL, existing file, or requirement spec: %r" % (spec, ))

    try:
        # download and unpack source package
        path = tempfile.mkdtemp('.spynepi')
        logger.info("Downloading %r" % spec)
        dist = PackageIndex().fetch_distribution(spec,
                                                 path,
                                                 force_scan=True,
                                                 source=True)
        archive_path = dist.location
        logger.info("Unpacking %r" % archive_path)
        unpack_archive(dist.location, path)

        # generate pypirc if possible
        if os.environ.has_key('HOME'):
            _generate_pypirc(own_url)
        else:  # FIXME: ??? No idea. Hopefully setuptools knows better.
            pass  # raise NotImplementedError("$HOME not defined, .pypirc not found.")

        # find setup.py in package. plagiarized from setuptools.
        setups = glob(os.path.join(path, '*', 'setup.py'))
        if not setups:
            raise ValidationError(
                "Couldn't find a setup script in %r editable distribution: %r"
                % (spec, os.path.join(path, '*', 'setup.py')))

        if len(setups) > 1:
            raise ValidationError(
                "Multiple setup scripts found in %r editable distribution: %r"
                % (spec, setups))

        # self-register the package.
        lib_dir = os.path.dirname(setups[0])
        command = ["python", "setup.py", "register", "-r", REPO_NAME]
        logger.info('calling %r', command)
        subprocess.call(command, cwd=lib_dir, stdout=sys.stdout)

        # self-upload the package
        command = ["python", "-m", "spynepi.util.pypi.upload", archive_path]
        logger.info('calling %r', command)
        subprocess.call(command,
                        cwd=lib_dir,
                        stdin=sys.stdin,
                        stdout=sys.stdout)

    finally:
        shutil.rmtree(path)
Example #16
 def fetch_distribution(self,
                        requirement,
                        tmpdir,
                        force_scan=False,
                        source=False,
                        develop_ok=False,
                        local_index=None):
     distribute_req = pkg_resources.Requirement.parse('distribute>=0.6.14')
     if pkg_resources.get_distribution('distribute') in distribute_req:
         # The local_index parameter is only in distribute>=0.6.14
         dist = PackageIndex.fetch_distribution(self, requirement, tmpdir,
                                                force_scan, source,
                                                develop_ok, local_index)
     else:
         dist = PackageIndex.fetch_distribution(self, requirement, tmpdir,
                                                force_scan, source,
                                                develop_ok)
     if dist:
         log.info('Using %s from %s' % (dist, dist.location))
     return dist
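The distribute>=0.6.14 check in Examples #8 and #16 uses Requirement membership: an installed Distribution (or a plain version string) can be tested with the in operator. A small sketch, using setuptools itself as a stand-in package:

import pkg_resources

req = pkg_resources.Requirement.parse("setuptools>=0.6")
installed = pkg_resources.get_distribution("setuptools")
# True when the installed version satisfies the requirement.
print(installed in req)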
Example #17
def grab_distrib(req, index=None, dest='.', search_pypi=True):
    """\
Downloads a distribution from the given package index(s) based on the
given requirement string(s). Downloaded distributions are placed in the
specified destination or the current directory if no destination is
specified.  If a distribution cannot be found in the given index(s), the
Python Package Index will be searched as a last resort unless 
search_pypi is False.  This does NOT install the distribution.
    """

    # allow multiple package indexes to be specified
    if index is None:
        index = []
    elif isinstance(index, basestring):
        index = [index]
    # else just assume it's some iterator of indexes

    # add PyPI as last place to search if it wasn't already specified
    if search_pypi and _pypi not in index and (_pypi + '/') not in index:
        index.append(_pypi)

    # allow specification of single or multiple requirements
    if isinstance(req, basestring):
        reqs = [Requirement.parse(req)]
    elif isinstance(req, Requirement):
        reqs = [req]
    else:
        reqs = []
        for rr in req:
            if isinstance(rr, basestring):
                reqs.append(Requirement.parse(rr))
            elif isinstance(rr, Requirement):
                reqs.append(rr)
            else:
                raise TypeError("supplied requirement arg must be a string" +
                                " or a Requirement, but given type is %s" %
                                type(rr))

    index_list = [PackageIndex(idx, search_path=[]) for idx in index]

    for req in reqs:
        fetched = None
        for idx in index_list:
            _logger.info('Looking for %s at package index %s' %
                         (req, idx.index_url))
            fetched = idx.download(req, dest)
            if fetched:
                _logger.info('    %s successfully downloaded' % fetched)
                break
        else:
            _logger.error("couldn't find distrib for %s" % req)

    return fetched
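A hedged usage sketch for grab_distrib above; the requirement string, index URL, and destination directory are placeholders:

# Download one distribution into ./downloads, falling back to PyPI
# if the given index does not provide it.
grab_distrib("example-package==1.0",
             index="https://my.index/simple",
             dest="downloads",
             search_pypi=True)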
Example #18
 def initialize_options(self):
     self.debug = None
     self.proxySetting = None
     self.service = None
     self.configDict = None
     self.pathConfig = None
     self.token = None
     self.userEnvironment = None
     self.distDepend = None
     self.pkgIndex = PackageIndex()
     self.dependencyList = []
     self.projectCoordinates = None
     self.tmpdir = tempfile.mkdtemp(prefix="wss_python_plugin-")
Example #19
 def initialize_options(self):
     self.bdist_base = None
     self.dist_dir = None
     self.format = None
     self.keep_temp = False
     self.name_prefix = None
     self.package_index = PackageIndex()
     self.requirements_mapping = None
     self.selected_options = None
     self.use_pypi_deps = False
     self.use_wheel = False
     self.with_py_prefix = False
     self.initialize_manifest_options()
Example #20
def checkout_extension(name):
    log('Downloading extension %s to temporary folder', name)
    root = os.path.join(tdir, name)
    os.mkdir(root)
    checkout_path = PackageIndex().download(name, root)

    unpack_archive(checkout_path, root)
    path = None
    for fn in os.listdir(root):
        path = os.path.join(root, fn)
        if os.path.isdir(path):
            break
    log('Downloaded to %s', path)
    return path
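Examples #15 and #20 both follow a download-then-unpack pattern; reduced to its two core calls it looks roughly like the sketch below, with a placeholder package name:

import os
import tempfile

from setuptools.archive_util import unpack_archive
from setuptools.package_index import PackageIndex

root = tempfile.mkdtemp()
# download() accepts a requirement string (or URL) and returns the local
# path of the fetched archive, or None if nothing was found.
archive = PackageIndex().download("example-package", root)
if archive:
    unpack_archive(archive, root)
    print(sorted(os.listdir(root)))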
Example #21
class HTMLRepository(RemoteRepository):
    """\
    A remote repository which easy_install can cope with.

    """
    def __init__(self, location, index=False, verbose=False):
        self.location = format_as_url(location)
        self.index = index
        if index:
            self.environment = PackageIndex(index_url=self.location,
                                            search_path=[])
        else:
            self.environment = PackageIndex(no_default_index=True)
            self.environment.add_find_links([self.location])
        self._projects = None
        self.tmpdir = mkdtemp(prefix="ensetuptools-")
        self.verbose = verbose

    @property
    def projects(self):
        if self._projects == None:
            self._projects = {}
            info("Scanning repository at %s..." % self.location)
            self.environment.prescan()
            self.environment.scan_all()
            for project in self.environment:
                self._projects[project] = HTMLProject(self, project,
                    verbose=self.verbose)
            for project in self.environment.package_pages:
                if project not in self._projects:
                    self._projects[project] = HTMLProject(self, project,
                        verbose=self.verbose)
                self._projects[project].scan_needed = True
        return self._projects

    def search(self, combiner='and', **kwargs):
        if 'project' not in kwargs:
            raise SearchError("EasyInstall-based remote repositories require"
                              " a 'project' search term.")
        if isinstance(kwargs['project'], basestring):
            return self.projects[kwargs['project']].match(combiner, **kwargs)
        partial_match_names = set(project_name
                                  for project_name in self.projects
                                  if kwargs['project'].match(project_name))
        matches = []
        for project in partial_match_names:
            matches += self.projects[project].search(combiner, **kwargs)
        return matches
Example #22
    def finalize_options(self):
        # log file activation and config
        if self.debug == 'y':
            logging.basicConfig(format='%(asctime)s%(levelname)s:%(message)s', level=logging.DEBUG,
                                filename='wss_plugin.log')

        # load and import config file
        try:
            sys.path.append(self.pathConfig)
            if sys.version_info.major >= 3:
                config_file_spec = importlib.util.spec_from_file_location('config_file', self.pathConfig)
                config_file_module = importlib.util.module_from_spec(config_file_spec)
                config_file_spec.loader.exec_module(config_file_module)
                self.configDict = config_file_module.config_info
            else:
                self.configDict = imp.load_source('config_file', self.pathConfig).config_info
            logging.info('Loading config_file was successful')
        except Exception as err:
            print("Can't import the config file.")
            sys.exit(err)

        # load proxy setting if exist
        if 'proxy' in self.configDict:
            self.proxySetting = self.configDict['proxy']
        if 'index_url' in self.configDict:
            self.pkgIndex = PackageIndex(index_url=self.configDict['index_url'])

        self.projectCoordinates = Coordinates.create_project_coordinates(self.distribution)
        self.userEnvironment = pk_res.Environment([get_python_lib()], platform=None, python=None)
        distribution_specification = self.distribution.get_name() + "==" + self.distribution.get_version()
        distribution_requirement = pk_res.Requirement.parse(distribution_specification)

        # resolve all dependencies
        try:
            self.distDepend = pk_res.working_set.resolve([distribution_requirement], env=self.userEnvironment)
            self.distDepend.pop(0)
            logging.info("Finished resolving dependencies")
        except Exception as err:
            print("distribution was not found on this system, and is required by this application", err.message)
Example #23
 def __init__(self, *args, **kwargs):
     PackageIndex.__init__(self, *args, **kwargs)
     self.to_scan = dists
Example #24
class Command(LabelCommand):
    option_list = LabelCommand.option_list + (
            make_option("-o", "--owner", help="add packages as OWNER",
                        metavar="OWNER", default=None),
        )
    help = """Add one or more packages to the repository. Each argument can
be a package name or a URL to an archive or egg. Package names honour
the same rules as easy_install with regard to indicating versions etc.

If a version of the package exists, but is older than what we want to install,
the owner remains the same.

For new packages there needs to be an owner. If the --owner option is present
we use that value. If not, we try to match the maintainer of the package, from
the metadata, with a user in our database. If it's a new package
and the maintainer email matches someone in our user list, we use that. If not,
the package cannot be added"""

    def __init__(self, *args, **kwargs):
        self.pypi = PackageIndex()
        LabelCommand.__init__(self, *args, **kwargs)

    def handle_label(self, label, **options):
        with tempdir() as tmp:
            reqs = pkg_resources.parse_requirements(label)
            for req in reqs:
                try:
                    package = self.pypi.fetch_distribution(req, tmp, source=True)
                except Exception as err:
                    print "Could not add %s: %s." % (req, err)
                else:
                    self._save_package(package.location, options["owner"])

    def _save_package(self, path, ownerid):
        meta = self._get_meta(path)

        try:
            # can't use get_or_create as that demands there be an owner
            package = Package.objects.get(name=meta.name)
            isnewpackage = False
        except Package.DoesNotExist:
            package = Package(name=meta.name)
            isnewpackage = True

        release = package.get_release(meta.version)
        if not isnewpackage and release and release.version == meta.version:
            print "%s-%s already added" % (meta.name, meta.version)
            return

        # algorithm as follows: If owner is given, try to grab user with that
        # username from db. If doesn't exist, bail. If no owner set look at
        # mail address from metadata and try to get that user. If it exists
        # use it. If not, bail.
        owner = None

        if ownerid:
            try:
                if "@" in ownerid:
                    owner = User.objects.get(email=ownerid)
                else:
                    owner = User.objects.get(username=ownerid)
            except User.DoesNotExist:
                pass
        else:
            try:
                owner = User.objects.get(email=meta.author_email)
            except User.DoesNotExist:
                pass

        if not owner:
            print "No owner defined. Use --owner to force one"
            return

        # at this point we have metadata and an owner, can safely add it.

        package.owner = owner
        # Some packages don't have proper licence, seems to be a problem
        # with setup.py upload. Use "UNKNOWN"
        package.license = meta.license or "Unknown"
        package.metadata_version = meta.metadata_version
        package.author = meta.author
        package.home_page = meta.home_page
        package.download_url = meta.download_url
        package.summary = meta.summary
        package.description = meta.description
        package.author_email = meta.author_email

        package.save()

        # TODO: Do I need to add classifiers objects???
#        for classifier in meta.classifiers:
#            package.classifiers.add(
#                    Classifier.objects.get_or_create(name=classifier)[0])
        release = Release()
        release.version = meta.version
        release.package = package
        release.package_info = self._get_pkg_info(meta)
        # Classifiers is processed separately since it is a list and must be
        # properly set so getlist returns the right result
        for cs in meta.classifiers:
            release.package_info.update({'classifiers': cs})

        release.save()

        dis = Distribution()
        dis.release = release

        dis.content.file = open(path, 'rb')
        dis.content.name = settings.DJANGOPYPI_RELEASE_UPLOAD_TO + '/' +\
                path.split('/')[-1]
        # TODO: Very bad hack here, how can I fix it?
        shutil.copy(path, settings.MEDIA_ROOT + '/' + dis.content.name)

        dis.md5_digest = self._get_md5(path)
        dis.filetype = self._get_filetype(path)
        dis.uploader = owner
        dis.comment = ''
        dis.pyversion = meta.requires_python or ''
        dis.signature = ''

        dis.save()
        print "%s-%s added" % (meta.name, meta.version)

    def _get_filetype(self, filename):
        "Returns the package file type, sdist o bdist"
        # TODO: review this, very empiric rules
        if filename.endswith('.zip') or filename.endswith('.tar.gz'):
            return 'sdist'
        raise TypeError(
                "The download resource:{filename} is not a source file".format(
                    filename=filename))

    def _get_md5(self, filename):
        "Returns md5 sum for a given file"
        md5 = hashlib.md5()
        with open(filename, 'rb') as content:
            while(1):
                block = content.read(md5.block_size)
                if not block:
                    break
                md5.update(block)
        return md5.hexdigest()

    def _get_pkg_info(self, meta):
        """
        Transforms metadata from a package to dict usable for MultiValueDict
        instances.
        """
        meta_version = meta.metadata_version
        if hasattr(meta, 'classifiers') or hasattr(meta,
                'download_url') and meta_version == '1.0':
            meta_version = '1.1'
        fields = conf.METADATA_FIELDS[meta_version]
        metadict = dict([(key, [getattr(meta, key),]) for key in dir(meta)
                if key in fields and not key.startswith('_') and
                key != 'classifiers'])
        return metadict


    def _get_meta(self, path):
        data = pkginfo.get_metadata(path)
        if data:
            return data
        else:
            print "Couldn't get metadata from %s. Not added to chishop" % (
                    os.path.basename(path))
            return None
Example #25
 def process_filename(self, fn, nested=False):
     PackageIndex.process_filename(self, fn, nested)
     dist = distro_from_setup_cfg(fn)
     if dist:
         self.add(dist)
Example #26
class bdist_pkg(Command):
    description = 'create FreeBSD pkg distribution'

    user_options = [
        ('bdist-base=', 'b',
         'Base directory for creating built distributions.'),
        ('dist-dir=', 'd',
         'Directory to put distribute files in.'),
        ('format=', 'f',
         'Set format as the package output format.  It can be one'
         ' of txz, tbz, tgz or tar.  If an invalid or no format is specified'
         ' tgz is assumed.'),
        ('keep-temp', None,
         'Keep intermediate build directories and files.'),
        ('origin=', None,
         'Custom origin name for build package.'),
        ('use-pypi-deps', None,
         'Automatically convert unknown Python dependencies to package ones.'
         ' Note that those dependencies will be named with py{}{}- prefix and'
         ' assumes that you have such packages in repository.'
         ''.format(*sys.version_info[:2])),
        ('use-wheel', None,
         'Use bdist_wheel to generated install layout instead of install'
         ' command.'),
        ('with-py-prefix', None,
         'Prepends py{}{}- prefix to package name.'
         ''.format(*sys.version_info[:2])),
    ]
    boolean_options = ('keep-temp', 'use-wheel', 'python-deps-to-pkg',
                       'with-py-prefix')

    compressor_for_format = {
        'txz': lzma,
        'tgz': gzip,
        'tbz': bz2,
    }

    def initialize_options(self):
        self.bdist_base = None
        self.dist_dir = None
        self.format = None
        self.keep_temp = False
        self.name_prefix = None
        self.package_index = PackageIndex()
        self.requirements_mapping = None
        self.selected_options = None
        self.use_pypi_deps = False
        self.use_wheel = False
        self.with_py_prefix = False
        self.initialize_manifest_options()

    def initialize_manifest_options(self):
        # TODO: What is it and how to use it?
        # self.annotations = None
        self.abi = None
        self.arch = None
        self.categories = None
        # TODO: Could conflicts be useful for us?
        # self.conflicts = None
        self.comment = None
        # TODO: What is it and how to use it?
        # self.dep_formula = None
        self.deps = None
        self.desc = None
        # These fields are autogenerated:
        # self.directories = None
        # self.dirs = None
        # self.files = None
        # self.flatsize = None
        self.groups = None
        self.license = None
        self.maintainer = None
        # TODO: should that be single message or multiple ones?
        # self.messages = None
        self.name = None
        self.options = None
        self.selected_options = None
        # Since we use extras, which don't have either defaults or descriptions
        # these fields are not supported so far:
        # self.options_defaults = None
        # self.options_descriptions = None
        self.origin = None
        # TODO: What is the path?
        # self.path = None
        self.prefix = None
        self.provides = None
        self.requires = None
        self.scripts = None
        # TODO: Do we need shared libs support?
        # self.shlibs = None
        # self.shlibs_provides = None
        # self.shlibs_requires = None
        # TODO: Support checksum.
        # self.sum = None
        self.users = None
        self.version = None
        # TODO: Can Python packages be vital?
        # self.vital = None
        self.www = None

    def finalize_options(self):
        self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
        self.ensure_format('tgz')
        self.bdist_dir = os.path.join(self.bdist_base, 'pkg')
        self.install_dir = os.path.join(self.bdist_dir, 'root')
        self.finalize_manifest_options()

    def finalize_manifest_options(self):
        project = self.distribution
        self.ensure_string('abi', self.get_abi())
        self.ensure_string('arch', self.get_arch())
        self.ensure_categories(project)
        self.ensure_string('comment', project.get_description())
        self.ensure_desc(project)
        self.ensure_string_list('groups')
        self.ensure_string('license', self.resolve_license(project))
        self.ensure_string('maintainer', self.get_maintainer(project))
        self.ensure_name(project)
        self.ensure_string('origin', self.get_default_origin(project))
        self.ensure_prefix('/usr/local')
        self.ensure_string_list('provides')
        self.ensure_string_list('requires')
        self.ensure_scripts()
        self.ensure_string('version', project.get_version())
        self.ensure_string_list('users')
        self.ensure_string('www', project.get_url())
        self.ensure_options()
        self.ensure_deps()
        self.maybe_rename_console_scripts(project)

    def run(self):
        self.build_and_install()
        self.make_pkg(self.generate_manifest_content())
        self.maybe_remove_temp(self.bdist_base)

    def build_and_install(self):
        if self.use_wheel:
            self.build_and_install_via_wheel()
        else:
            self.build_and_install_via_setuptools()

    def build_and_install_via_setuptools(self):
        # Basically, we need the intermediate results of bdist_dumb,
        # but since it's too monolithic and does the stuff that we would like
        # to avoid, here short copy-paste happens /:
        build = self.reinitialize_command('build', reinit_subcommands=1)
        build.build_base = self.bdist_base
        self.run_command('build')
        install = self.reinitialize_command('install', reinit_subcommands=1)
        install.prefix = self.prefix
        install.root = self.install_dir
        install.warn_dir = 0
        self.run_command('install')

    def build_and_install_via_wheel(self):
        if not wheel_available:
            raise RuntimeError('The `wheel` package is not available.')
        build = self.reinitialize_command('build', reinit_subcommands=1)
        build.build_base = self.bdist_base
        bdist_wheel = self.reinitialize_command(
            'bdist_wheel',
            reinit_subcommands=1
        )
        bdist_wheel.bdist_base = self.bdist_base
        bdist_wheel.keep_temp = True
        self.run_command('bdist_wheel')
        name = self.distribution.get_name()
        pip.wheel.move_wheel_files(
            name=self.name,
            req=WhlRequirement.parse('{}=={}'.format(name, self.version)),
            wheeldir=bdist_wheel.bdist_dir,
            root=self.install_dir,
            prefix=self.prefix,
        )

    def generate_manifest_content(self):
        manifest = {
            'abi': self.abi,
            'arch': self.arch,
            'categories': self.categories,
            'comment': self.comment,
            'deps': self.deps,
            'desc': self.desc,
            'directories': {},
            'files': {},
            'flatsize': 0,
            'groups': self.groups,
            'licenselogic': 'single',
            'licenses': [self.license] if self.license else [],
            'maintainer': self.maintainer,
            'name': self.name,
            'options': self.options,
            'origin': self.origin,
            'prefix': self.prefix,
            'provides': self.provides,
            'requires': self.requires,
            'scripts': self.scripts,
            'users': self.users,
            'version': self.version,
            'www': self.www,
        }

        mdirs = manifest['directories']
        mfiles = manifest['files']
        for real_file_path, install_path in self.iter_install_files():
            with open(real_file_path, 'rb') as fh:
                data = fh.read()
                manifest['flatsize'] += len(data)
                mdirs[os.path.dirname(install_path)] = {
                    'gname': 'wheel',
                    'perm': '0755',
                    'uname': 'root',
                }
                mfiles[install_path] = {
                    'gname': 'wheel',
                    'perm': '0644',
                    'sum': hashlib.sha256(data).hexdigest(),
                    'uname': 'root',
                }

        # TODO: Should we keep UNKNOWN values?
        manifest = {key: value for key, value in manifest.items()
                    if value and value != 'UNKNOWN'}

        if 'name' not in manifest:
            raise DistutilsOptionError('Project must have name defined')

        if 'version' not in manifest:
            raise DistutilsOptionError('Project must have version defined')

        if 'comment' not in manifest:
            raise DistutilsOptionError('Project must have description defined')

        if 'desc' not in manifest:
            raise DistutilsOptionError('Project must have long_description'
                                       ' defined')

        if 'maintainer' not in manifest:
            raise DistutilsOptionError('Project must have author or maintainer'
                                       ' defined')

        return manifest

    def make_pkg(self, manifest):
        manifest_path = self.make_manifest(manifest)
        compact_manifest_path = self.make_compact_manifest(manifest)
        files_paths = chain([
            (manifest_path, os.path.basename(manifest_path)),
            (compact_manifest_path, os.path.basename(compact_manifest_path))
        ], self.iter_install_files())

        self.mkpath(self.dist_dir)
        tar_path = self.make_tar(files_paths)

        ext = self.format
        if ext != 'tar':
            compressor = self.get_compressor(ext)
            if compressor is None:
                raise RuntimeError('Format {} is not supported'.format(ext))
            self.compress_tar(tar_path, ext, compressor)
            os.remove(tar_path)

    def make_manifest(self, content):
        path = os.path.join(self.bdist_dir, '+MANIFEST')
        with open(path, 'w') as fobj:
            json.dump(content, fobj, sort_keys=True, indent=4)
        return path

    def make_compact_manifest(self, content):
        path = os.path.join(self.bdist_dir, '+COMPACT_MANIFEST')
        compact_content = content.copy()
        compact_content.pop('directories')
        compact_content.pop('files')
        with open(path, 'w') as fobj:
            json.dump(compact_content, fobj, sort_keys=True, indent=4)
        return path

    def make_tar(self, files_paths):
        basename = '{}-{}.tar'.format(self.name, self.version)
        path = os.path.join(self.dist_dir, basename)
        seen = set()
        with tarfile.open(path, 'w') as tar:
            for file_path, tar_path in files_paths:
                tar_dir_path = os.path.dirname(tar_path)
                if tar_dir_path and tar_dir_path not in seen:
                    tarinfo = tar.gettarinfo(os.path.dirname(file_path),
                                             tar_dir_path)
                    tarinfo.name = tar_dir_path
                    tar.addfile(tarinfo)
                    seen.add(tar_dir_path)
                tarinfo = tar.gettarinfo(file_path, tar_path)
                tarinfo.name = tar_path
                with open(file_path, 'rb') as f:
                    tar.addfile(tarinfo, f)
        return path

    def compress_tar(self, tar_path, ext, compressor):
        txx_path = tar_path.rsplit('.tar', 1)[0] + '.' + ext
        with compressor.open(txx_path, 'w') as txx:
            with open(tar_path, 'rb') as tar:
                txx.write(tar.read())
        return txx_path

    def get_compressor(self, format):
        return self.compressor_for_format.get(format)

    def get_abi(self):
        if platform.system().lower() != 'freebsd':
            if not self.distribution.is_pure():
                raise DistutilsOptionError(
                    'Unable to determine default ABI value'
                    ' since bdist_pkg call happens not on FreeBSD system.'
                    ' Please specify this value according the target system'
                    ' for which you build this package.'
                )
            return '*'
        return ':'.join((
            platform.system(),
            # 10.1-STABLE-r273058 -> 10
            platform.release().split('-', 1)[0].split('.')[0],
            # TODO: ensure that platform.machine() gives correct values
            platform.machine()
        ))

    def get_arch(self):
        if platform.system().lower() != 'freebsd':
            if not self.distribution.is_pure():
                raise DistutilsOptionError(
                    'Unable to determine default ARCH value'
                    ' since bdist_pkg call happens not on FreeBSD system.'
                    ' Please specify this value according the target system'
                    ' for which you build this package.'
                )
            return '*'
        return ':'.join((
            platform.system(),
            # 10.1-STABLE-r273058 -> 10
            platform.release().split('-', 1)[0].split('.')[0],
            # TODO: shouldn't there be a better way?
            'x86:64' if platform.machine() == 'amd64' else 'x86:32'
        ))

    def get_default_origin(self, project):
        return 'devel/py{}{}-{}'.format(sys.version_info[0],
                                        sys.version_info[1],
                                        project.get_name())

    def get_maintainer(self, project):
        maintainer = '{} <{}>'.format(project.get_maintainer(),
                                      project.get_maintainer_email())
        if maintainer == 'UNKNOWN <UNKNOWN>':
            # No explicit maintainer specified, use author contact instead
            maintainer = '{} <{}>'.format(project.get_author(),
                                          project.get_author_email())
        return maintainer

    def resolve_license(self, project):
        # Thanks for this mapping goes to pytoport project
        py2freebsd_mapping = {
            'agpl-3.0': 'AGPLv3',
            'apache-2.0': 'APACHE20',
            'artistic-2.0': 'ART20',
            'bsd-2-clause': 'BSD2CLAUSE',
            'bsd-3-clause-clear': 'BSD3CLAUSE',
            'bsd-3-clause': 'BSD3CLAUSE',
            'cc0-1.0': 'CC0-1.0',
            'epl-1.0': 'EPL',
            'gpl-2.0': 'GPLv2',
            'gpl-3.0': 'GPLv3',
            'isc': 'ISCL',
            'lgpl-2.1': 'LGPL21',
            'lgpl-3.0': 'LGPL3',
            'mit': 'MIT',
            'mpl-2.0': 'MPL',
            'ofl-1.1': 'OFL11',
        }
        license = project.get_license()
        pkg_license = py2freebsd_mapping.get(license.lower())
        if license != 'UNKNOWN' and pkg_license is None:
            self.warn('Unable to convert license %s to PKG naming' % license)
            return license
        return pkg_license

    def ensure_format(self, default):
        self.ensure_string('format', default)
        if self.format not in {'txz', 'tbz', 'tgz', 'tar'}:
            self.warn('Unknown format {!r}, falling back to {}'
                      ''.format(self.format, default))
            self.format = default

    def ensure_prefix(self, default=None):
        self.ensure_string('prefix', default)
        self.prefix = self.prefix.rstrip('/')

    def ensure_categories(self, project):
        self.categories = self.categories or project.get_keywords()
        self.ensure_string_list('categories')

    def ensure_deps(self):
        install_requires = set(self.distribution.install_requires or [])
        for option in self.selected_options:
            install_requires |= set(self.distribution.extras_require[option])
        mapping = self.requirements_mapping or {}
        self.deps = self.deps or {}

        seen_deps = set([])
        for python_dep, spec in mapping.items():
            if not isinstance(python_dep, str):
                raise DistutilsOptionError('Invalid Python dependency: {}'
                                           ''.format(python_dep))

            if python_dep not in install_requires:
                raise DistutilsOptionError('{} is not in install requires list'
                                           ''.format(python_dep))

            if not isinstance(spec, dict):
                raise DistutilsOptionError('requirements_mapping items must be'
                                           ' dict, got {}'.format(repr(spec)))
            if set(spec) != {'origin', 'version', 'name'}:
                raise DistutilsOptionError('requirements_mapping items must'
                                           ' have "origin" and "version" keys,'
                                           ' got {}'.format(set(spec)))
            for key in {'origin', 'version', 'name'}:
                if not isinstance(spec[key], str):
                    raise DistutilsOptionError('"{}" value must be string, got'
                                               ' {}'.format(key, spec[key]))

            self.deps[spec['name']] = {'origin': spec['origin'],
                                       'version': spec['version']}
            seen_deps.add(python_dep)

        missing = seen_deps ^ install_requires
        if missing and self.use_pypi_deps:
            for item in missing:
                requirement = Requirement.parse(item)
                distribution = self.package_index.obtain(requirement)
                key = 'py{1}{2}-{0}'.format(distribution.key,
                                            *sys.version_info[:2])
                self.deps[key] = {
                    'origin': 'pypi/py-{}'.format(distribution.key),
                    'version': distribution.version
                }
        elif missing:
            raise DistutilsOptionError('These packages are listed in install'
                                       ' requirements, but not in bdist_pkg'
                                       ' requirements mapping: {}'
                                       ''.format(', '.join(missing)))

    def ensure_desc(self, project):
        desc = project.get_long_description()
        desc = desc if desc != 'UNKNOWN' else project.get_description()
        desc = self.cut_changelog(desc)
        self.ensure_string('desc', desc)

    def ensure_name(self, project):
        name = project.get_name()
        if self.with_py_prefix:
            name = 'py{}{}-{}'.format(
                sys.version_info[0], sys.version_info[1], name
            )
        self.ensure_string('name', name)

    def ensure_options(self):
        provided_options = set(self.distribution.extras_require or {})
        self.selected_options = set(self.selected_options or [])
        unknown_options = self.selected_options - provided_options
        if not unknown_options:
            self.options = {option: option in self.selected_options
                            for option in provided_options}
        else:
            raise DistutilsOptionError('Unknown extras selected: {}'
                                       ''.format(', '.join(unknown_options)))

    def ensure_scripts(self):
        if self.scripts is None:
            return
        if not isinstance(self.scripts, dict):
            raise DistutilsOptionError('scripts must be a dict, got {}'
                                       ''.format(self.scripts))
        valid_keys = {
            'pre-install',
            'post-install',
            'install',
            'pre-deinstall',
            'post-deinstall',
            'deinstall',
            'pre-upgrade',
            'post-upgrade',
            'upgrade',
        }
        bad_keys = [key for key in self.scripts if key not in valid_keys]
        if bad_keys:
            raise DistutilsOptionError('invalid scripts: {}'
                                       ''.format(', '.join(bad_keys)))
        bad_keys = [key for key, value in self.scripts.items()
                    if not isinstance(value, str)]
        if bad_keys:
            raise DistutilsOptionError('invalid scripts: {}'
                                       ''.format(', '.join(bad_keys)))

    def iter_install_files(self):
        for root, dirs, files in os.walk(self.install_dir):
            for file in files:
                reldir = os.path.relpath(root, self.install_dir)
                install_path = '/' + os.path.join(reldir, file)
                install_path = install_path.replace(self.prefix + '/lib64/',
                                                    self.prefix + '/lib/')
                yield os.path.join(root, file), install_path

    def maybe_remove_temp(self, path):
        if self.keep_temp:
            return
        if path is None:
            return
        if os.path.exists(path):
            shutil.rmtree(path)

    def maybe_rename_console_scripts(self, project):
        if not self.with_py_prefix:
            return
        if not project.entry_points:
            return
        console_scripts = project.entry_points.get('console_scripts')
        if console_scripts is None:
            return
        prefixed_console_scripts = []
        for script in console_scripts:
            name, callback = script.split('=')
            name = '{}{}.{}'.format(name.strip(), *sys.version_info[:2])
            prefixed_console_scripts.append(
                '{} = {}'.format(name, callback.strip())
            )
        project.entry_points['console_scripts'] = prefixed_console_scripts

    def cut_changelog(self, desc):
        def match_changelog_header(line):
            words = re.findall(r'\b\w+\b', line.lower())
            if len(words) != 1:
                return True
            if 'changelog' in words or 'changes' in words:
                return False
            return True
        return '\n'.join(takewhile(
            match_changelog_header,
            desc.splitlines()
        ))
Example #27
 def __init__(self, index_url):
   PackageIndex.__init__(self, index_url)
   self.platform = None
Example #28
 def process_filename(self, fn, nested=False):
     PackageIndex.process_filename(self, fn, nested)
     dist = distro_from_setup_cfg(fn)
     if dist:
         self.add(dist)
Example #29
 def __init__(self, *args, **kwargs):
     self.pypi = PackageIndex()
     LabelCommand.__init__(self, *args, **kwargs)
Example #30
File: ppadd.py  Project: mercurate/chishop
 def __init__(self, *args, **kwargs):
     self.pypi = PackageIndex()
     LabelCommand.__init__(self, *args, **kwargs)
Example #31
    def run(self):
        # Prepare for iterations.
        pkgreqmap = reqmap()
        for reqarg in self.reqarglist:
            pkgreqmap.append_arg(reqarg)
        pkgreqmap.resolve_matchlist(self.logobj, self.options['--url'],
                                    self.options['--skip-logged'])

        pkgidx = PackageIndex(index_url=self.options['--url'])

        show_sepline = False
        # Main loop.
        distlist = []
        ok_packages = []
        while len(pkgreqmap) > 0:
            new_pkgreqmap = reqmap()
            for idx, total, pkgreqobj in pkgreqmap.reqobj_seq():

                pkgname = pkgreqobj.project_name
                if pkgname in ok_packages: continue
                ok_packages.append(pkgname)
                reqstr = str(pkgreqobj)

                if show_sepline: self.pkgsys.sepline()
                else: show_sepline = True

                self.pkgsys.info('======== %s: %d/%d ========' % \
                                     (pkgname, idx + 1, total))

                if self.options['--skip-broken']:
                    try:
                        self.logobj.check_broken(pkgname)
                    except:
                        continue

                # Collect values into args step by step.
                args = copy.copy(self.options)
                args['self'] = self.arg0

                self.pkgsys.begin('Downloading %s' % reqstr)
                try:
                    dist = pkgidx.fetch_distribution(
                        pkgreqobj, self.options['--download-dir'], source=True)
                    if dist is None:
                        raise RuntimeError, 'None'
                except:
                    self.pkgsys.end(False)
                    self.logobj.in_except(pkgname,
                                          'Download %s failed' % reqstr)
                    continue
                else:
                    self.pkgsys.end(True)

                self.pkgsys.begin('Unpacking %s' % dist.location)
                try:
                    smart_archive(args, dist, self.options['--unpack-dir'])
                except:
                    self.pkgsys.end(False)
                    self.logobj.in_except(pkgname, 'Unpack %s failed' % reqstr)
                    continue
                else:
                    self.pkgsys.end(True)
                unpackpath = args['unpackpath']

                config_secs = [
                    '%s-%s' % (dist.project_name, dist.version),
                    dist.project_name
                ]

                for secname in config_secs:
                    for name, value in config.items(secname):
                        if name not in args: args[name] = value
                if not 'patches' in args: args['patches'] = []
                else: args['patches'] = args['patches'].split()

                # Apply patches.
                for patch in config.patches(config_secs):
                    self.pkgsys.begin('Applying %s' % os.path.basename(patch))
                    os.system('(cd %s; patch -p0 < %s) > /dev/null' % \
                                  (unpackpath, patch))
                    self.pkgsys.end(True)
                    if os.path.isfile(os.path.join(unpackpath, 'fixsetup.py')):
                        os.system('(cd %s; python fixsetup.py)' % unpackpath)

                self.pkgsys.begin('Get package args')
                try:
                    get_package_args(args, dist)
                except:
                    self.pkgsys.end(False)
                    self.logobj.in_except(pkgname, 'Get package args failed')
                    continue
                else:
                    self.pkgsys.end(True)

                self.pkgsys.begin('Setup args')
                try:
                    self.pkgsys.setup_args(args)
                except:
                    self.pkgsys.end(False)
                    self.logobj.in_except(pkgname, 'pkgsys.setup_args failed')
                    continue
                else:
                    self.pkgsys.end(True)

                self.pkgsys.begin('Writing %s' % args['output'])
                try:
                    ensure_dir(os.path.dirname(args['output']))
                    if smart_write(args['output'],
                                   os.path.join(pkgroot, args['template']),
                                   args):
                        updated = True
                    if smart_symlink(
                            args['pkgpath'],
                            os.path.join(args['filedir'], args['pkgfile'])):
                        updated = True
                    if args['patches'] != []:
                        ensure_dir(args['patchdir'])
                        for patch in config.patches(config_secs):
                            tgtpatch = os.path.join(args['patchdir'],
                                                    os.path.basename(patch))
                            if smart_symlink(patch, tgtpatch):
                                updated = True
                except:
                    self.pkgsys.end(False)
                    self.logobj.in_except(pkgname, 'write failed')
                    continue
                else:
                    self.pkgsys.end(True)

                self.pkgsys.begin('Postprocess %s' % args['output'])
                try:
                    self.pkgsys.process(args)
                except:
                    self.pkgsys.end(False)
                    self.logobj.in_except(pkgname, 'process failed')
                    continue
                else:
                    self.pkgsys.end(True)

                if self.options['--deps']:
                    reqstrlist = args['install_requires']
                    for k in args['extras_require'].keys():
                        reqstrlist.extend(args['extras_require'][k])
                    for reqstr in reqstrlist:
                        new_pkgreqmap.add(reqstr2obj(reqstr))

                self.logobj.pkgname_ok(pkgname)
                if self.options['--cache-root'] != '': distlist.append(dist)

                # Process of a single package is finished.

            pkgreqmap = new_pkgreqmap

        if self.options['--cache-root']:
            cache = pypicache(self.pkgsys, self.options['--cache-root'],
                              self.options['--cache-url'])
            cache.add_packages(distlist)
            del (cache)
Example #32
File: ppadd.py  Project: Affirm/djangopypi2
class Command(LabelCommand):
    option_list = LabelCommand.option_list + (
            make_option("-o", "--owner", help="add packages as OWNER",
                        metavar="OWNER", default=None),
        )
    help = """Add one or more packages to the repository. Each argument can
be a package name or a URL to an archive or egg. Package names honour
the same rules as easy_install with regard to indicating versions etc.

If a version of the package exists, but is older than what we want to install,
the owner remains the same.

For new packages there needs to be an owner. If the --owner option is present
we use that value. If not, we try to match the maintainer of the package, from
the metadata, with a user in our database. If it's a new package
and the maintainer email matches someone in our user list, we use that. If not,
the package cannot be added"""

    def __init__(self, *args, **kwargs):
        self.pypi = PackageIndex()
        LabelCommand.__init__(self, *args, **kwargs)

    def handle_label(self, label, **options):
        with tempdir() as tmp:
            path = self.pypi.download(label, tmp)
            if path:
                self._save_package(path, options["owner"])
            else:
                print "Could not add %s. Not found." % label

    def _save_package(self, path, ownerid):
        meta = self._get_meta(path)

        try:
            # can't use get_or_create as that demands there be an owner
            package = Package.objects.get(name=meta.name)
            isnewpackage = False
        except Package.DoesNotExist:
            package = Package(name=meta.name)
            isnewpackage = True

        release = package.get_release(meta.version)
        if not isnewpackage and release and release.version == meta.version:
            print "%s-%s already added" % (meta.name, meta.version)
            return

        # algorithm as follows: If owner is given, try to grab user with that
        # username from db. If doesn't exist, bail. If no owner set look at
        # mail address from metadata and try to get that user. If it exists
        # use it. If not, bail.
        owner = None

        if ownerid:
            try:
                if "@" in ownerid:
                    owner = User.objects.get(email=ownerid)
                else:
                    owner = User.objects.get(username=ownerid)
            except User.DoesNotExist:
                pass
        else:
            try:
                owner = User.objects.get(email=meta.author_email)
            except User.DoesNotExist:
                pass

        if not owner:
            print "No owner defined. Use --owner to force one"
            return

        # at this point we have metadata and an owner, can safely add it.
        package.save()

        package.owners.add(owner)
        package.maintainers.add(owner)

        for classifier in meta.classifiers:
            package.classifiers.add(
                    Classifier.objects.get_or_create(name=classifier)[0])

        release = Release()
        release.version = meta.version
        release.package = package
        release.metadata_version = meta.metadata_version
        package_info = MultiValueDict()
        package_info.update(meta.__dict__)
        release.package_info = package_info
        release.save()

        file = File(open(path, "rb"))
        if isinstance(meta, pkginfo.SDist):
            dist = 'sdist'
        elif meta.filename.endswith('.rmp') or meta.filename.endswith('.srmp'):
            dist = 'bdist_rpm'
        elif meta.filename.endswith('.exe'):
            dist = 'bdist_wininst'
        elif meta.filename.endswith('.egg'):
            dist = 'bdist_egg'
        elif meta.filename.endswith('.dmg'):
            dist = 'bdist_dmg'
        else:
            dist = 'bdist_dumb'
        release.distributions.create(content=file, uploader=owner, filetype=dist)
        print "%s-%s added" % (meta.name, meta.version)

    def _get_meta(self, path):
        data = pkginfo.get_metadata(path)
        if data:
            return data
        else:
            print "Couldn't get metadata from %s. Not added to chishop" % os.path.basename(path)
            return None
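
Since the command above is a Django management command (the aggregator lists it as ppadd.py in Affirm/djangopypi2), it is normally driven through manage.py. A minimal usage sketch, assuming the command is registered under the name ppadd; the package name and owner below are placeholders:

# Equivalent to: python manage.py ppadd --owner=admin somepackage
from django.core.management import call_command

call_command('ppadd', 'somepackage', owner='admin')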
Example #33
0
 def __init__(self, *args, **kwargs):
     PackageIndex.__init__(self, *args, **kwargs)
     self.to_scan = dists
Example #34
0
 def load_index_servers(self):
     index_servers = []
     for index in IndexSite.objects.all():
         pypi = PackageIndex(index_url=index.url)
         pypi.scan_all()
         index_servers.append(pypi)
     self.index_servers = index_servers
Example #35
0
import warnings
warnings.filterwarnings('ignore')

import os
import argparse
import itertools

from paver.easy import path
from pip.req import parse_requirements
from setuptools.package_index import PackageIndex

__cache__ = path("~/.pycache").expanduser().abspath()
if not __cache__.exists():
    __cache__.makedirs()

index = PackageIndex(index_url="http://pypi.python.org/simple/",
                     search_path=[])

html = """<html>
<head><title>Index - {project}</title></head>
<body>
<h1>{project}</h1>
{body}
</body>
</html>
"""


def main(requirements):

    if not __cache__.exists():
        __cache__.makedirs()
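
Example #35 is cut off before main() finishes. A minimal sketch of how the objects defined above could be combined, under the assumption (not taken from the original) that each requirement is downloaded into the cache and the html template is rendered as a flat list of links:

# A sketch only: build_local_index and the link layout are assumptions, not
# the author's code. "index", "__cache__" and "html" are defined above.
from pkg_resources import Requirement

def build_local_index(requirement_strings, project="local cache"):
    links = []
    for reqstr in requirement_strings:
        # fetch_distribution downloads a matching source distribution into the cache dir
        dist = index.fetch_distribution(Requirement.parse(reqstr), __cache__,
                                        force_scan=True, source=True)
        if dist is not None:
            links.append('<a href="{0}">{0}</a><br/>'.format(
                os.path.basename(dist.location)))
    page = html.format(project=project, body="\n".join(links))
    (__cache__ / "index.html").write_text(page)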
Example #36
0
class SetupToolsCommand(Command):
    """setuptools Command"""
    description = "Setuptools WSS plugin"

    user_options = [
        ('offline=', 'o', 'Offline flag'),
        ('pathConfig=', 'p', 'Configuration file path'),
        ('debug=', 'd', 'Show debugging output'),
    ]

    def initialize_options(self):
        self.offline = None
        self.debug = None
        self.proxySetting = None
        self.service = None
        self.configDict = None
        self.pathConfig = None
        self.token = None
        self.userEnvironment = None
        self.distDepend = None
        self.pkgIndex = PackageIndex()
        self.dependencyList = []
        self.projectCoordinates = None
        self.tmpdir = tempfile.mkdtemp(prefix="wss_python_plugin-")

    def finalize_options(self):
        # log file activation and config
        if self.debug == 'y':
            logging.basicConfig(format='%(asctime)s%(levelname)s:%(message)s', level=logging.DEBUG,
                                filename='wss_plugin.log')

        # load and import config file
        try:
            sys.path.append(self.pathConfig)
            if sys.version_info.major >= 3:
                config_file_spec = importlib.util.spec_from_file_location('config_file', self.pathConfig)
                config_file_module = importlib.util.module_from_spec(config_file_spec)
                config_file_spec.loader.exec_module(config_file_module)
                self.configDict = config_file_module.config_info
            else:
                self.configDict = imp.load_source('config_file', self.pathConfig).config_info
            logging.info('Loading config_file was successful')
        except Exception as err:
            print("Can't import the config file.")
            sys.exit(err)

        # load proxy setting if exist
        if 'proxy' in self.configDict:
            self.proxySetting = self.configDict['proxy']
        if 'index_url' in self.configDict:
            self.pkgIndex = PackageIndex(index_url=self.configDict['index_url'])

        self.projectCoordinates = Coordinates.create_project_coordinates(self.distribution)
        self.userEnvironment = pk_res.Environment([get_python_lib()], platform=None, python=None)
        distribution_specification = self.distribution.get_name() + "==" + self.distribution.get_version()
        distribution_requirement = pk_res.Requirement.parse(distribution_specification)

        # resolve all dependencies
        try:
            self.distDepend = pk_res.working_set.resolve([distribution_requirement], env=self.userEnvironment)
            self.distDepend.pop(0)
            logging.info("Finished resolving dependencies")
        except Exception as err:
            print("distribution was not found on this system, and is required by this application", err.message)

    def run(self):
        self.validate_config_file()
        self.scan_modules()
        self.create_service()
        self.run_plugin()

    def validate_config_file(self):
        """ Validate content of config file params """

        # org token
        if 'org_token' in self.configDict:
            if self.configDict['org_token'] == '':
                sys.exit("Organization token is empty")
        else:
            sys.exit("No organization token option exists")

        logging.info("Validation of config file was successful")
        # Todo: check existence of other keys in dict

    def scan_modules(self):
        """ Downloads all the dependencies calculates their sha1 and creates a list of dependencies info"""

        if self.distDepend is not None:
            for dist in self.distDepend:
                try:
                    # create a dist instance from requirement instance
                    current_requirement = dist.as_requirement()
                    current_distribution = self.pkgIndex.fetch_distribution(
                        current_requirement, self.tmpdir, force_scan=True, source=True, develop_ok=True)

                    # create dep. root
                    if current_distribution is not None:
                        self.dependencyList.append(create_dependency_record(current_distribution))

                except Exception as err:
                    print("Error in fetching dists " + dist.key + " " + dist.version)
            logging.info("Finished calculation for all dependencies")
        else:
            logging.info("No dependencies were found")

        shutil.rmtree(self.tmpdir)

    def create_service(self):
        """ Creates a WssServiceClient with the destination url"""

        if ('url_destination' in self.configDict) and (self.configDict['url_destination'] != ''):
            self.service = WssServiceClient(self.configDict['url_destination'], self.proxySetting)
        else:
            self.service = WssServiceClient("https://saas.whitesourcesoftware.com/agent", self.proxySetting)

        logging.debug("The destination url is set to: " + self.service.to_string())

    def run_plugin(self):
        """ Initializes the plugin requests"""

        org_token = self.configDict['org_token']
        user_key = ''
        project = self.create_project_obj()
        product = ''
        product_version = ''
        self.connection_retries = 1
        self.connection_retries_interval = 3

        self.policy_violation = False

        if 'product_name' in self.configDict:
            product = self.configDict['product_name']

        if 'user_key' in self.configDict:
            user_key = self.configDict['user_key']

        if 'product_version' in self.configDict:
            product_version = self.configDict['product_version']

        if 'connection_retries' in self.configDict:
            self.connection_retries = self.configDict['connection_retries']

        if 'connection_retries_interval' in self.configDict:
            self.connection_retries_interval = self.configDict['connection_retries_interval']

        if self.configDict.get('offline') or self.offline:
            logging.debug("Offline request")
            offline_request(project, org_token, user_key, product, product_version)
        else:
            if self.configDict.get('check_policies'):
                logging.debug("Checking policies")
                self.check_policies(project, org_token, user_key, product, product_version)

            # no policy violations => send update and pass build
            if not self.policy_violation:
                logging.debug("Updating inventory")
                self.update_inventory(project, org_token, user_key, product, product_version)

            # policy violation AND force_update
            elif self.configDict.get('force_update'):
                print("However all dependencies will be force updated to project inventory.")
                logging.debug("Updating inventory")
                self.update_inventory(project, org_token, user_key, product, product_version)
                # fail the build
                if self.configDict.get('fail_on_error'):
                    print("Build failure due to policy violation (fail_on_error = True)")
                    sys.exit(1)

            # policy violation AND (NOT force_update)
            elif self.configDict.get('fail_on_error'):
                # fail the build
                print("Build failure due to policy violation (fail_on_error = True)")
                sys.exit(1)

    def create_project_obj(self):
        """ create the actual project """

        project_token = None
        if 'project_token' in self.configDict:
            project_token = self.configDict['project_token']
            if project_token == '':
                project_token = None

        return AgentProjectInfo(coordinates=self.projectCoordinates, dependencies=self.dependencyList,
                                project_token=project_token)

    def check_policies(self, project_info, token, user_key, product_name, product_version):
        """ Sends the check policies request to the agent according to the request type """

        projects = [project_info]

        force_check_all_dependencies = self.configDict.get('force_check_all_dependencies')
        request = CheckPoliciesRequest(token, user_key, product_name, product_version, projects, force_check_all_dependencies)

        result = self.service.check_policies(request, self.connection_retries, self.connection_retries_interval)

        try:
            self.handle_policies_result(result)
        except Exception:
            logging.warning("Some dependencies do not conform with open source policies")
            sys.exit(1)

    def handle_policies_result(self, result):
        """ Checks if any policies rejected if so stops """

        logging.debug("Creating policies report")
        if result.has_rejections():
            self.policy_violation = True
            print("Some dependencies do not conform with open source policies:")
            print_policies_rejection(result)
        else:
            logging.debug("All dependencies conform with open source policies!")

    def update_inventory(self, project_info, token, user_key, product_name, product_version):
        """ Sends the update request to the agent according to the request type """

        logging.debug("Updating White Source")

        projects = [project_info]
        request = UpdateInventoryRequest(token, user_key, product_name, product_version, projects)
        result = self.service.update_inventory(request, self.connection_retries, self.connection_retries_interval)
        print_update_result(result)
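
The command above reads all of its settings from a config_file module exposing a config_info dict. A sketch of such a file, using only the keys the command actually looks up; every value is a placeholder:

# config_file.py -- a sketch; all values are placeholders.
config_info = {
    'org_token': 'YOUR-ORG-TOKEN',            # required, must be non-empty
    'user_key': 'YOUR-USER-KEY',              # optional
    'product_name': 'my-product',             # optional
    'product_version': '1.0',                 # optional
    'project_token': '',                      # empty string is treated as None
    'url_destination': '',                    # empty -> default SaaS agent URL
    'index_url': 'https://pypi.org/simple/',  # optional PackageIndex override
    'proxy': {},                              # optional proxy settings
    'check_policies': True,
    'force_check_all_dependencies': False,
    'force_update': False,
    'fail_on_error': True,
    'offline': False,
    'connection_retries': 1,
    'connection_retries_interval': 3,
}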
Example #37
0
class SetupToolsCommand(Command):
    """setuptools Command"""

    description = "Setuptools WSS plugin"

    user_options = [("pathConfig=", "p", "Configuration file path"), ("debug=", "d", "Show debugging output")]

    def initialize_options(self):
        self.debug = None
        self.proxySetting = None
        self.service = None
        self.configDict = None
        self.pathConfig = None
        self.token = None
        self.userEnvironment = None
        self.distDepend = None
        self.pkgIndex = PackageIndex()
        self.dependencyList = []
        self.projectCoordinates = None
        self.tmpdir = tempfile.mkdtemp(prefix="wss_python_plugin-")

    def finalize_options(self):
        # log file activation and config
        if self.debug == "y":
            logging.basicConfig(
                format="%(asctime)s%(levelname)s:%(message)s", level=logging.DEBUG, filename="wss_plugin.log"
            )

        # load and import config file
        try:
            sys.path.append(self.pathConfig)
            self.configDict = __import__("config_file").config_info
            logging.info("Loading config_file was successful")
        except Exception as err:
            sys.exit("Can't import the config file." + err.message)

        # load proxy setting if exist
        if "proxy" in self.configDict:
            self.proxySetting = self.configDict["proxy"]
        if "index_url" in self.configDict:
            self.pkgIndex = PackageIndex(index_url=self.configDict["index_url"])

        self.projectCoordinates = Coordinates.create_project_coordinates(self.distribution)
        self.userEnvironment = pk_res.Environment(get_python_lib(), platform=None, python=None)
        distribution_specification = self.distribution.get_name() + "==" + self.distribution.get_version()
        distribution_requirement = pk_res.Requirement.parse(distribution_specification)

        # resolve all dependencies
        try:
            self.distDepend = pk_res.working_set.resolve([distribution_requirement], env=self.userEnvironment)
            self.distDepend.pop(0)
            logging.info("Finished resolving dependencies")
        except Exception as err:
            print "distribution was not found on this system, and is required by this application", err.message

    def run(self):
        self.validate_config_file()
        self.scan_modules()
        self.create_service()
        self.run_plugin()

    def validate_config_file(self):
        """ Validate content of config file params """

        # org token
        if "org_token" in self.configDict:
            if self.configDict["org_token"] == "":
                sys.exit("Organization token is empty")
        else:
            sys.exit("No organization token option exists")

        logging.info("Validation of config file was successful")
        # Todo: check existence of other keys in dict

    def scan_modules(self):
        """ Downloads all the dependencies calculates their sha1 and creates a list of dependencies info"""

        if self.distDepend is not None:
            for dist in self.distDepend:
                try:
                    # create a dist instance from requirement instance
                    current_requirement = dist.as_requirement()
                    current_distribution = self.pkgIndex.fetch_distribution(
                        current_requirement, self.tmpdir, force_scan=True, source=True, develop_ok=True
                    )

                    # create dep. root
                    if current_distribution is not None:
                        self.dependencyList.append(create_dependency_record(current_distribution))

                except Exception as err:
                    print "Error in fetching dists " + dist.key + " " + dist.version
            logging.info("Finished calculation for all dependencies")
        else:
            logging.info("No dependencies were found")

        shutil.rmtree(self.tmpdir)

    def create_service(self):
        """ Creates a WssServiceClient with the destination url"""

        if ("url_destination" in self.configDict) and (self.configDict["url_destination"] != ""):
            self.service = WssServiceClient(self.configDict["url_destination"], self.proxySetting)
        else:
            self.service = WssServiceClient("https://saas.whitesourcesoftware.com/agent", self.proxySetting)

        logging.debug("The destination url is set to: " + self.service.to_string())

    def run_plugin(self):
        """ Initializes the plugin requests"""

        org_token = self.configDict["org_token"]
        project = self.create_project_obj()
        product = ""
        product_version = ""

        if "product_name" in self.configDict:
            product = self.configDict["product_name"]

        if "product_version" in self.configDict:
            product_version = self.configDict["product_version"]

        self.check_policies(project, org_token, product, product_version)
        self.update_inventory(project, org_token, product, product_version)

    def create_project_obj(self):
        """ create the actual project """

        project_token = None
        if "project_token" in self.configDict:
            project_token = self.configDict["project_token"]
            if project_token == "":
                project_token = None

        return AgentProjectInfo(self.projectCoordinates, self.dependencyList, project_token)

    def check_policies(self, project_info, token, product_name, product_version):
        """ Sends the check policies request to the agent according to the request type """

        if ("check_policies" in self.configDict) and (self.configDict["check_policies"]):
            logging.debug("Checking policies")

            projects = [project_info]
            request = CheckPoliciesRequest(token, product_name, product_version, projects)
            result = self.service.check_policies(request)

            try:
                self.handle_policies_result(result)
            except Exception as err:
                sys.exit("Some dependencies do not conform with open source policies")

    def handle_policies_result(self, result):
        """ Checks if any policies rejected if so stops """

        logging.debug("Creating policies report")
        if result.has_rejections():
            print_policies_rejection(result)
            logging.info("Some dependencies do not conform with open source policies")
            raise
        else:
            logging.debug("All dependencies conform with open source policies")

    def update_inventory(self, project_info, token, product_name, product_version):
        """ Sends the update request to the agent according to the request type """

        logging.debug("Updating White Source")

        projects = [project_info]
        request = UpdateInventoryRequest(token, product_name, product_version, projects)
        result = self.service.update_inventory(request)
        print_update_result(result)
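
As with the previous variant, this Command subclass only does something once it is wired into a project's setup.py. A sketch of that wiring; the command name whitesource_update is chosen here purely for illustration:

# setup.py -- a sketch; the command name is an assumption.
from setuptools import setup

setup(
    name='example-project',
    version='0.1',
    cmdclass={'whitesource_update': SetupToolsCommand},
)

# The plugin could then be invoked with its user_options, e.g.:
#   python setup.py whitesource_update --pathConfig=/path/to/config_file.py --debug=y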
Example #38
0
    def run(self):
        # Prepare for iterations.
        pkgreqmap = reqmap()
        for reqarg in self.reqarglist: pkgreqmap.append_arg(reqarg)
        pkgreqmap.resolve_matchlist(self.logobj, self.options['--url'],
                                    self.options['--skip-logged'])

        pkgidx = PackageIndex(index_url = self.options['--url'])

        show_sepline = False
        # Main loop.
        distlist = []
        ok_packages = []
        while len(pkgreqmap) > 0:
            new_pkgreqmap = reqmap()
            for idx, total, pkgreqobj in pkgreqmap.reqobj_seq():

                pkgname = pkgreqobj.project_name
                if pkgname in ok_packages: continue
                ok_packages.append(pkgname)
                reqstr = str(pkgreqobj)

                if show_sepline: self.pkgsys.sepline()
                else: show_sepline = True

                self.pkgsys.info('======== %s: %d/%d ========' % \
                                     (pkgname, idx + 1, total))

                if self.options['--skip-broken']:
                    try: self.logobj.check_broken(pkgname)
                    except: continue

                # Collect values into args step by step.
                args = copy.copy(self.options)
                args['self'] = self.arg0

                self.pkgsys.begin('Downloading %s' % reqstr)
                try:
                    dist = pkgidx.fetch_distribution(pkgreqobj,
                                                     self.options['--download-dir'],
                                                     source = True)
                    if dist is None:
                        raise RuntimeError, 'None'
                except:
                    self.pkgsys.end(False)
                    self.logobj.in_except(pkgname,
                                          'Download %s failed' % reqstr)
                    continue
                else:
                    self.pkgsys.end(True)

                self.pkgsys.begin('Unpacking %s' % dist.location)
                try: smart_archive(args, dist, self.options['--unpack-dir'])
                except:
                    self.pkgsys.end(False)
                    self.logobj.in_except(pkgname, 'Unpack %s failed' % reqstr)
                    continue
                else:
                    self.pkgsys.end(True)
                unpackpath = args['unpackpath']

                config_secs = ['%s-%s' % (dist.project_name, dist.version),
                               dist.project_name]

                for secname in config_secs:
                    for name, value in config.items(secname):
                        if name not in args: args[name] = value
                if not 'patches' in args: args['patches'] = []
                else: args['patches'] = args['patches'].split()

                # Apply patches.
                for patch in config.patches(config_secs):
                    self.pkgsys.begin('Applying %s' % os.path.basename(patch))
                    os.system('(cd %s; patch -p0 < %s) > /dev/null' % \
                                  (unpackpath, patch))
                    self.pkgsys.end(True)
                    if os.path.isfile(os.path.join(unpackpath, 'fixsetup.py')):
                        os.system('(cd %s; python fixsetup.py)' % unpackpath)

                self.pkgsys.begin('Get package args')
                try: get_package_args(args, dist)
                except:
                    self.pkgsys.end(False)
                    self.logobj.in_except(pkgname, 'Get package args failed')
                    continue
                else:
                    self.pkgsys.end(True)

                self.pkgsys.begin('Setup args')
                try: self.pkgsys.setup_args(args)
                except:
                    self.pkgsys.end(False)
                    self.logobj.in_except(pkgname, 'pkgsys.setup_args failed')
                    continue
                else:
                    self.pkgsys.end(True)

                self.pkgsys.begin('Writing %s' % args['output'])
                try:
                    ensure_dir(os.path.dirname(args['output']))
                    if smart_write(args['output'],
                                   os.path.join(pkgroot, args['template']),
                                   args):
                        updated = True
                    if smart_symlink(args['pkgpath'],
                                     os.path.join(args['filedir'],
                                                  args['pkgfile'])):
                        updated = True
                    if args['patches'] != []:
                        ensure_dir(args['patchdir'])
                        for patch in config.patches(config_secs):
                            tgtpatch = os.path.join(args['patchdir'],
                                                    os.path.basename(patch))
                            if smart_symlink(patch, tgtpatch):
                                updated = True
                except:
                    self.pkgsys.end(False)
                    self.logobj.in_except(pkgname, 'write failed')
                    continue
                else:
                    self.pkgsys.end(True)

                self.pkgsys.begin('Postprocess %s' % args['output'])
                try: self.pkgsys.process(args)
                except:
                    self.pkgsys.end(False)
                    self.logobj.in_except(pkgname, 'process failed')
                    continue
                else:
                    self.pkgsys.end(True)

                if self.options['--deps']:
                    reqstrlist = args['install_requires']
                    for k in args['extras_require'].keys():
                        reqstrlist.extend(args['extras_require'][k])
                    for reqstr in reqstrlist:
                        new_pkgreqmap.add(reqstr2obj(reqstr))

                self.logobj.pkgname_ok(pkgname)
                if self.options['--cache-root'] != '': distlist.append(dist)

                # Process of a single package is finished.

            pkgreqmap = new_pkgreqmap

        if self.options['--cache-root']:
            cache = pypicache(self.pkgsys,
                              self.options['--cache-root'],
                              self.options['--cache-url'])
            cache.add_packages(distlist)
            del(cache)
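
Stripped of the packaging pipeline, the download step at the heart of the loop above reduces to PackageIndex.fetch_distribution. A condensed, standalone sketch (index URL, requirement and target directory are placeholders):

from pkg_resources import Requirement
from setuptools.package_index import PackageIndex

pkgidx = PackageIndex(index_url='https://pypi.org/simple/')
reqobj = Requirement.parse('example-package>=1.0')
# source=True restricts the fetch to source distributions, as in the loop above
dist = pkgidx.fetch_distribution(reqobj, '/tmp/downloads', source=True)
if dist is None:
    print('Download of %s failed' % reqobj)
else:
    print('Fetched %s %s to %s' % (dist.project_name, dist.version, dist.location))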
Example #39
0
 def __init__(self, *args, **kwargs):
     PackageIndex.__init__(self, *args, **kwargs)
     self.debug_msgs = []
     self.info_msgs = []
     self.warn_msgs = []
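
The fragment above only records anything if PackageIndex's logging hooks are overridden. A sketch of what the rest of such a subclass presumably looks like; the class name RecordingPackageIndex is illustrative:

from setuptools.package_index import PackageIndex

class RecordingPackageIndex(PackageIndex):
    def __init__(self, *args, **kwargs):
        PackageIndex.__init__(self, *args, **kwargs)
        self.debug_msgs = []
        self.info_msgs = []
        self.warn_msgs = []

    # PackageIndex reports progress through its debug()/info()/warn() methods,
    # so capturing the messages is a matter of overriding those hooks.
    def debug(self, msg, *args):
        self.debug_msgs.append(msg % args if args else msg)
        PackageIndex.debug(self, msg, *args)

    def info(self, msg, *args):
        self.info_msgs.append(msg % args if args else msg)
        PackageIndex.info(self, msg, *args)

    def warn(self, msg, *args):
        self.warn_msgs.append(msg % args if args else msg)
        PackageIndex.warn(self, msg, *args)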
Example #40
0
File: ppadd.py  Project: mercurate/chishop
class Command(LabelCommand):
    option_list = LabelCommand.option_list + (
        make_option("-o", "--owner", help="add packages as OWNER", metavar="OWNER", default=None),
    )
    help = """Add one or more packages to the repository. Each argument can
be a package name or a URL to an archive or egg. Package names honour
the same rules as easy_install with regard to indicating versions etc.

If a version of the package exists, but is older than what we want to install,
the owner remains the same.

For new packages there needs to be an owner. If the --owner option is present
we use that value. If not, we try to match the maintainer of the package, from
the metadata, with a user in our database. If it's a new package and the
maintainer email matches someone in our user list, we use that. If not, the
package cannot be added"""

    def __init__(self, *args, **kwargs):
        self.pypi = PackageIndex()
        LabelCommand.__init__(self, *args, **kwargs)

    def handle_label(self, label, **options):
        with tempdir() as tmp:
            path = self.pypi.download(label, tmp)
            if path:
                self._save_package(path, options["owner"])
            else:
                print "Could not add %s. Not found." % label

    def _save_package(self, path, ownerid):
        meta = self._get_meta(path)

        try:
            # can't use get_or_create as that demands there be an owner
            project = Project.objects.get(name=meta.name)
            isnewproject = False
        except Project.DoesNotExist:
            project = Project(name=meta.name)
            isnewproject = True

        release = project.get_release(meta.version)
        if not isnewproject and release and release.version == meta.version:
            print "%s-%s already added" % (meta.name, meta.version)
            return

        # algorithm as follows: If owner is given, try to grab user with that
        # username from db. If doesn't exist, bail. If no owner set look at
        # mail address from metadata and try to get that user. If it exists
        # use it. If not, bail.
        owner = None

        if ownerid:
            try:
                if "@" in ownerid:
                    owner = User.objects.get(email=ownerid)
                else:
                    owner = User.objects.get(username=ownerid)
            except User.DoesNotExist:
                pass
        else:
            try:
                owner = User.objects.get(email=meta.author_email)
            except User.DoesNotExist:
                pass

        if not owner:
            print "No owner defined. Use --owner to force one"
            return

        # at this point we have metadata and an owner, can safely add it.

        project.owner = owner
        # Some packages don't have proper licence, seems to be a problem
        # with setup.py upload. Use "UNKNOWN"
        project.license = meta.license or "Unknown"
        project.metadata_version = meta.metadata_version
        project.author = meta.author
        project.home_page = meta.home_page
        project.download_url = meta.download_url
        project.summary = meta.summary
        project.description = meta.description
        project.author_email = meta.author_email

        project.save()

        for classifier in meta.classifiers:
            project.classifiers.add(Classifier.objects.get_or_create(name=classifier)[0])

        release = Release()
        release.version = meta.version
        release.project = project
        filename = os.path.basename(path)

        file = File(open(path, "rb"))
        release.distribution.save(filename, file)
        release.save()
        print "%s-%s added" % (meta.name, meta.version)

    def _get_meta(self, path):
        data = pkginfo.get_metadata(path)
        if data:
            return data
        else:
            print "Couldn't get metadata from %s. Not added to chishop" % os.path.basename(path)
            return None
Example #41
0
class Command(LabelCommand):
    option_list = LabelCommand.option_list + (make_option(
        "-o",
        "--owner",
        help="add packages as OWNER",
        metavar="OWNER",
        default=None), )
    help = """Add one or more packages to the repository. Each argument can
be a package name or a URL to an archive or egg. Package names honour
the same rules as easy_install with regard to indicating versions etc.

If a version of the package exists, but is older than what we want to install,
the owner remains the same.

For new packages there needs to be an owner. If the --owner option is present
we use that value. If not, we try to match the maintainer of the package, from
the metadata, with a user in our database. If it's a new package and the
maintainer email matches someone in our user list, we use that. If not, the
package cannot be added"""

    def __init__(self, *args, **kwargs):
        self.pypi = PackageIndex()
        LabelCommand.__init__(self, *args, **kwargs)

    def handle_label(self, label, **options):
        with tempdir() as tmp:
            path = self.pypi.download(label, tmp)
            if path:
                self._save_package(path, options["owner"])
            else:
                print "Could not add %s. Not found." % label

    def _save_package(self, path, ownerid):
        meta = self._get_meta(path)

        try:
            # can't use get_or_create as that demands there be an owner
            package = Package.objects.get(name=meta.name)
            isnewpackage = False
        except Package.DoesNotExist:
            package = Package(name=meta.name)
            isnewpackage = True

        release = package.get_release(meta.version)
        if not isnewpackage and release and release.version == meta.version:
            print "%s-%s already added" % (meta.name, meta.version)
            return

        # algorithm as follows: If owner is given, try to grab user with that
        # username from db. If doesn't exist, bail. If no owner set look at
        # mail address from metadata and try to get that user. If it exists
        # use it. If not, bail.
        owner = None

        if ownerid:
            try:
                if "@" in ownerid:
                    owner = User.objects.get(email=ownerid)
                else:
                    owner = User.objects.get(username=ownerid)
            except User.DoesNotExist:
                pass
        else:
            try:
                owner = User.objects.get(email=meta.author_email)
            except User.DoesNotExist:
                pass

        if not owner:
            print "No owner defined. Use --owner to force one"
            return

        # at this point we have metadata and an owner, can safely add it.
        package.save()

        package.owners.add(owner)
        package.maintainers.add(owner)

        for classifier in meta.classifiers:
            package.classifiers.add(
                Classifier.objects.get_or_create(name=classifier)[0])

        release = Release()
        release.version = meta.version
        release.package = package
        release.metadata_version = meta.metadata_version
        package_info = MultiValueDict()
        package_info.update(meta.__dict__)
        release.package_info = package_info
        release.save()

        file = File(open(path, "rb"))
        if isinstance(meta, pkginfo.SDist):
            dist = 'sdist'
        elif meta.filename.endswith('.rmp') or meta.filename.endswith('.srmp'):
            dist = 'bdist_rpm'
        elif meta.filename.endswith('.exe'):
            dist = 'bdist_wininst'
        elif meta.filename.endswith('.egg'):
            dist = 'bdist_egg'
        elif meta.filename.endswith('.dmg'):
            dist = 'bdist_dmg'
        else:
            dist = 'bdist_dumb'
        release.distributions.create(content=file,
                                     uploader=owner,
                                     filetype=dist)
        print "%s-%s added" % (meta.name, meta.version)

    def _get_meta(self, path):
        data = pkginfo.get_metadata(path)
        if data:
            return data
        else:
            print "Couldn't get metadata from %s. Not added to chishop" % os.path.basename(
                path)
            return None
Example #42
0
class Command(LabelCommand):
    option_list = LabelCommand.option_list + (make_option(
        "-o",
        "--owner",
        help="add packages as OWNER",
        metavar="OWNER",
        default=None), )
    help = """Add one or more packages to the repository. Each argument can
be a package name or a URL to an archive or egg. Package names honour
the same rules as easy_install with regard to indicating versions etc.

If a version of the package exists, but is older than what we want to install,
the owner remains the same.

For new packages there needs to be an owner. If the --owner option is present
we use that value. If not, we try to match the maintainer of the package, from
the metadata, with a user in our database. If it's a new package and the
maintainer email matches someone in our user list, we use that. If not, the
package cannot be added"""

    def __init__(self, *args, **kwargs):
        self.pypi = PackageIndex()
        LabelCommand.__init__(self, *args, **kwargs)

    def handle_label(self, label, **options):
        with tempdir() as tmp:
            path = self.pypi.download(label, tmp)
            if path:
                self._save_package(path, options["owner"])
            else:
                print "Could not add %s. Not found." % label

    def _save_package(self, path, ownerid):
        meta = self._get_meta(path)

        try:
            # can't use get_or_create as that demands there be an owner
            project = Project.objects.get(name=meta.name)
            isnewproject = False
        except Project.DoesNotExist:
            project = Project(name=meta.name)
            isnewproject = True

        release = project.get_release(meta.version)
        if not isnewproject and release and release.version == meta.version:
            print "%s-%s already added" % (meta.name, meta.version)
            return

        # algorithm as follows: If owner is given, try to grab user with that
        # username from db. If doesn't exist, bail. If no owner set look at
        # mail address from metadata and try to get that user. If it exists
        # use it. If not, bail.
        owner = None

        if ownerid:
            try:
                if "@" in ownerid:
                    owner = User.objects.get(email=ownerid)
                else:
                    owner = User.objects.get(username=ownerid)
            except User.DoesNotExist:
                pass
        else:
            try:
                owner = User.objects.get(email=meta.author_email)
            except User.DoesNotExist:
                pass

        if not owner:
            print "No owner defined. Use --owner to force one"
            return

        # at this point we have metadata and an owner, can safely add it.

        project.owner = owner
        # Some packages don't have proper licence, seems to be a problem
        # with setup.py upload. Use "UNKNOWN"
        project.license = meta.license or "Unknown"
        project.metadata_version = meta.metadata_version
        project.author = meta.author
        project.home_page = meta.home_page
        project.download_url = meta.download_url
        project.summary = meta.summary
        project.description = meta.description
        project.author_email = meta.author_email

        project.save()

        for classifier in meta.classifiers:
            project.classifiers.add(
                Classifier.objects.get_or_create(name=classifier)[0])

        release = Release()
        release.version = meta.version
        release.project = project
        filename = os.path.basename(path)

        file = File(open(path, "rb"))
        release.distribution.save(filename, file)
        release.save()
        print "%s-%s added" % (meta.name, meta.version)

    def _get_meta(self, path):
        data = pkginfo.get_metadata(path)
        if data:
            return data
        else:
            print "Couldn't get metadata from %s. Not added to chishop" % os.path.basename(
                path)
            return None
Example #43
0
class bdist_pkg(Command):
    description = 'create FreeBSD pkg distribution'

    user_options = [
        ('bdist-base=', 'b',
         'Base directory for creating built distributions.'),
        ('dist-dir=', 'd', 'Directory to put distribution files in.'),
        ('format=', 'f',
         'Set format as the package output format.  It can be one'
         ' of txz, tbz, tgz or tar.  If an invalid or no format is specified,'
         ' tgz is assumed.'),
        ('keep-temp', None, 'Keep intermediate build directories and files.'),
        ('origin=', None, 'Custom origin name for build package.'),
        ('use-pypi-deps', None,
         'Automatically convert unknown Python dependencies to package ones.'
         ' Note that those dependencies will be named with a py{}{}- prefix;'
         ' it is assumed that you have such packages in your repository.'
         ''.format(*sys.version_info[:2])),
        ('use-wheel', None,
         'Use bdist_wheel to generated install layout instead of install'
         ' command.'),
        ('with-py-prefix', None, 'Prepends py{}{}- prefix to package name.'
         ''.format(*sys.version_info[:2])),
    ]
    boolean_options = ('keep-temp', 'use-wheel', 'use-pypi-deps',
                       'with-py-prefix')

    compressor_for_format = {
        'txz': lzma,
        'tgz': gzip,
        'tbz': bz2,
    }

    def initialize_options(self):
        self.bdist_base = None
        self.dist_dir = None
        self.format = None
        self.keep_temp = False
        self.name_prefix = None
        self.package_index = PackageIndex()
        self.requirements_mapping = None
        self.selected_options = None
        self.use_pypi_deps = False
        self.use_wheel = False
        self.with_py_prefix = False
        self.initialize_manifest_options()

    def initialize_manifest_options(self):
        # TODO: What is it and how to use it?
        # self.annotations = None
        self.abi = None
        self.arch = None
        self.categories = None
        # TODO: Could conflicts be useful for us?
        # self.conflicts = None
        self.comment = None
        # TODO: What is it and how to use it?
        # self.dep_formula = None
        self.deps = None
        self.desc = None
        # These fields are autogenerated:
        # self.directories = None
        # self.dirs = None
        # self.files = None
        # self.flatsize = None
        self.groups = None
        self.license = None
        self.maintainer = None
        # TODO: should that be single message or multiple ones?
        # self.messages = None
        self.name = None
        self.options = None
        self.selected_options = None
        # Since we use extras, which don't have either defaults or descriptions
        # these fields are not supported so far:
        # self.options_defaults = None
        # self.options_descriptions = None
        self.origin = None
        # TODO: What is the path?
        # self.path = None
        self.prefix = None
        self.provides = None
        self.requires = None
        self.scripts = None
        # TODO: Do we need shared libs support?
        # self.shlibs = None
        # self.shlibs_provides = None
        # self.shlibs_requires = None
        # TODO: Support checksum.
        # self.sum = None
        self.users = None
        self.version = None
        # TODO: Can Python packages be vital?
        # self.vital = None
        self.www = None

    def finalize_options(self):
        self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
        self.ensure_format('tgz')
        self.bdist_dir = os.path.join(self.bdist_base, 'pkg')
        self.install_dir = os.path.join(self.bdist_dir, 'root')
        self.finalize_manifest_options()

    def finalize_manifest_options(self):
        project = self.distribution
        self.ensure_string('abi', self.get_abi())
        self.ensure_string('arch', self.get_arch())
        self.ensure_categories(project)
        self.ensure_string('comment', project.get_description())
        self.ensure_desc(project)
        self.ensure_string_list('groups')
        self.ensure_string('license', self.resolve_license(project))
        self.ensure_string('maintainer', self.get_maintainer(project))
        self.ensure_name(project)
        self.ensure_string('origin', self.get_default_origin(project))
        self.ensure_prefix('/usr/local')
        self.ensure_string_list('provides')
        self.ensure_string_list('requires')
        self.ensure_scripts()
        self.ensure_string('version', project.get_version())
        self.ensure_string_list('users')
        self.ensure_string('www', project.get_url())
        self.ensure_options()
        self.ensure_deps()
        self.maybe_rename_console_scripts(project)

    def run(self):
        self.build_and_install()
        self.make_pkg(self.generate_manifest_content())
        self.maybe_remove_temp(self.bdist_base)

    def build_and_install(self):
        if self.use_wheel:
            self.build_and_install_via_wheel()
        else:
            self.build_and_install_via_setuptools()

    def build_and_install_via_setuptools(self):
        # Basically, we need the intermediate results of bdist_dumb,
        # but since it's too monolithic and does things that we would like
        # to avoid, a short copy-paste happens here /:
        build = self.reinitialize_command('build', reinit_subcommands=1)
        build.build_base = self.bdist_base
        self.run_command('build')
        install = self.reinitialize_command('install', reinit_subcommands=1)
        install.prefix = self.prefix
        install.root = self.install_dir
        install.warn_dir = 0
        self.run_command('install')

    def build_and_install_via_wheel(self):
        if not wheel_available:
            raise RuntimeError('The `wheel` package is not available.')
        build = self.reinitialize_command('build', reinit_subcommands=1)
        build.build_base = self.bdist_base
        bdist_wheel = self.reinitialize_command('bdist_wheel',
                                                reinit_subcommands=1)
        bdist_wheel.bdist_base = self.bdist_base
        bdist_wheel.keep_temp = True
        self.run_command('bdist_wheel')
        name = self.distribution.get_name()
        pip.wheel.move_wheel_files(
            name=self.name,
            req=WhlRequirement.parse('{}=={}'.format(name, self.version)),
            wheeldir=bdist_wheel.bdist_dir,
            root=self.install_dir,
            prefix=self.prefix,
        )

    def generate_manifest_content(self):
        manifest = {
            'abi': self.abi,
            'arch': self.arch,
            'categories': self.categories,
            'comment': self.comment,
            'deps': self.deps,
            'desc': self.desc,
            'directories': {},
            'files': {},
            'flatsize': 0,
            'groups': self.groups,
            'licenselogic': 'single',
            'licenses': [self.license] if self.license else [],
            'maintainer': self.maintainer,
            'name': self.name,
            'options': self.options,
            'origin': self.origin,
            'prefix': self.prefix,
            'provides': self.provides,
            'requires': self.requires,
            'scripts': self.scripts,
            'users': self.users,
            'version': self.version,
            'www': self.www,
        }

        mdirs = manifest['directories']
        mfiles = manifest['files']
        for real_file_path, install_path in self.iter_install_files():
            with open(real_file_path, 'rb') as fh:
                data = fh.read()
                manifest['flatsize'] += len(data)
                mdirs[os.path.dirname(install_path)] = {
                    'gname': 'wheel',
                    'perm': '0755',
                    'uname': 'root',
                }
                mfiles[install_path] = {
                    'gname': 'wheel',
                    'perm': '0644',
                    'sum': hashlib.sha256(data).hexdigest(),
                    'uname': 'root',
                }

        # TODO: Should we keep UNKNOWN values?
        manifest = {
            key: value
            for key, value in manifest.items() if value and value != 'UNKNOWN'
        }

        if 'name' not in manifest:
            raise DistutilsOptionError('Project must have name defined')

        if 'version' not in manifest:
            raise DistutilsOptionError('Project must have version defined')

        if 'comment' not in manifest:
            raise DistutilsOptionError('Project must have description defined')

        if 'desc' not in manifest:
            raise DistutilsOptionError('Project must have long_description'
                                       ' defined')

        if 'maintainer' not in manifest:
            raise DistutilsOptionError('Project must have author or maintainer'
                                       ' defined')

        return manifest

    def make_pkg(self, manifest):
        manifest_path = self.make_manifest(manifest)
        compact_manifest_path = self.make_compact_manifest(manifest)
        files_paths = chain(
            [(manifest_path, os.path.basename(manifest_path)),
             (compact_manifest_path, os.path.basename(compact_manifest_path))],
            self.iter_install_files())

        self.mkpath(self.dist_dir)
        tar_path = self.make_tar(files_paths)

        ext = self.format
        if ext != 'tar':
            compressor = self.get_compressor(ext)
            if compressor is None:
                raise RuntimeError('Format {} is not supported'.format(ext))
            self.compress_tar(tar_path, ext, compressor)
            os.remove(tar_path)

    def make_manifest(self, content):
        path = os.path.join(self.bdist_dir, '+MANIFEST')
        with open(path, 'w') as fobj:
            json.dump(content, fobj, sort_keys=True, indent=4)
        return path

    def make_compact_manifest(self, content):
        path = os.path.join(self.bdist_dir, '+COMPACT_MANIFEST')
        compact_content = content.copy()
        compact_content.pop('directories')
        compact_content.pop('files')
        with open(path, 'w') as fobj:
            json.dump(compact_content, fobj, sort_keys=True, indent=4)
        return path

    def make_tar(self, files_paths):
        basename = '{}-{}.tar'.format(self.name, self.version)
        path = os.path.join(self.dist_dir, basename)
        seen = set()
        with tarfile.open(path, 'w') as tar:
            for file_path, tar_path in files_paths:
                tar_dir_path = os.path.dirname(tar_path)
                if tar_dir_path and tar_dir_path not in seen:
                    tarinfo = tar.gettarinfo(os.path.dirname(file_path),
                                             tar_dir_path)
                    tarinfo.name = tar_dir_path
                    tar.addfile(tarinfo)
                    seen.add(tar_dir_path)
                tarinfo = tar.gettarinfo(file_path, tar_path)
                tarinfo.name = tar_path
                with open(file_path, 'rb') as f:
                    tar.addfile(tarinfo, f)
        return path

    def compress_tar(self, tar_path, ext, compressor):
        txx_path = tar_path.rsplit('.tar', 1)[0] + '.' + ext
        with compressor.open(txx_path, 'w') as txx:
            with open(tar_path, 'rb') as tar:
                txx.write(tar.read())
        return txx_path

    def get_compressor(self, format):
        return self.compressor_for_format.get(format)

    def get_abi(self):
        if platform.system().lower() != 'freebsd':
            if not self.distribution.is_pure():
                raise DistutilsOptionError(
                    'Unable to determine default ABI value'
                    ' since bdist_pkg call happens not on FreeBSD system.'
                    ' Please specify this value according the target system'
                    ' for which you build this package.')
            return '*'
        return ':'.join((
            platform.system(),
            # 10.1-STABLE-r273058 -> 10
            platform.release().split('-', 1)[0].split('.')[0],
            # TODO: ensure that platform.machine() gives correct values
            platform.machine()))

    def get_arch(self):
        if platform.system().lower() != 'freebsd':
            if not self.distribution.is_pure():
                raise DistutilsOptionError(
                    'Unable to determine default ARCH value'
                    ' since bdist_pkg call happens not on FreeBSD system.'
                    ' Please specify this value according the target system'
                    ' for which you build this package.')
            return '*'
        return ':'.join((
            platform.system(),
            # 10.1-STABLE-r273058 -> 10
            platform.release().split('-', 1)[0].split('.')[0],
            # TODO: shouldn't there be a better way?
            'x86:64' if platform.machine() == 'amd64' else 'x86:32'))

    def get_default_origin(self, project):
        return 'devel/py{}{}-{}'.format(sys.version_info[0],
                                        sys.version_info[1],
                                        project.get_name())

    def get_maintainer(self, project):
        maintainer = '{} <{}>'.format(project.get_maintainer(),
                                      project.get_maintainer_email())
        if maintainer == 'UNKNOWN <UNKNOWN>':
            # No explicit maintainer specified, use author contact instead
            maintainer = '{} <{}>'.format(project.get_author(),
                                          project.get_author_email())
        return maintainer

    def resolve_license(self, project):
        # Thanks for this mapping goes to pytoport project
        py2freebsd_mapping = {
            'agpl-3.0': 'AGPLv3',
            'apache-2.0': 'APACHE20',
            'artistic-2.0': 'ART20',
            'bsd-2-clause': 'BSD2CLAUSE',
            'bsd-3-clause-clear': 'BSD3CLAUSE',
            'bsd-3-clause': 'BSD3CLAUSE',
            'cc0-1.0': 'CC0-1.0',
            'epl-1.0': 'EPL',
            'gpl-2.0': 'GPLv2',
            'gpl-3.0': 'GPLv3',
            'isc': 'ISCL',
            'lgpl-2.1': 'LGPL21',
            'lgpl-3.0': 'LGPL3',
            'mit': 'MIT',
            'mpl-2.0': 'MPL',
            'ofl-1.1': 'OFL11',
        }
        license = project.get_license()
        pkg_license = py2freebsd_mapping.get(license.lower())
        if license != 'UNKNOWN' and pkg_license is None:
            self.warn('Unable to convert license %s to PKG naming' % license)
            return license
        return pkg_license

    def ensure_format(self, default):
        self.ensure_string('format', default)
        if self.format not in {'txz', 'tbz', 'tgz', 'tar'}:
            self.warn('Unknown format {!r}, falling back to {}'
                      ''.format(self.format, default))
            self.format = default

    def ensure_prefix(self, default=None):
        self.ensure_string('prefix', default)
        self.prefix = self.prefix.rstrip('/')

    def ensure_categories(self, project):
        self.categories = self.categories or project.get_keywords()
        self.ensure_string_list('categories')

    def ensure_deps(self):
        install_requires = set(self.distribution.install_requires or [])
        for option in self.selected_options:
            install_requires |= set(self.distribution.extras_require[option])
        mapping = self.requirements_mapping or {}
        self.deps = self.deps or {}

        seen_deps = set([])
        for python_dep, spec in mapping.items():
            if not isinstance(python_dep, str):
                raise DistutilsOptionError('Invalid Python dependency: {}'
                                           ''.format(python_dep))

            if python_dep not in install_requires:
                raise DistutilsOptionError('{} is not in install requires list'
                                           ''.format(python_dep))

            if not isinstance(spec, dict):
                raise DistutilsOptionError('requirements_mapping items must be'
                                           ' dict, got {}'.format(repr(spec)))
            if set(spec) != {'origin', 'version', 'name'}:
                raise DistutilsOptionError('requirements_mapping items must'
                                           ' have "origin" and "version" keys,'
                                           ' got {}'.format(set(spec)))
            for key in {'origin', 'version', 'name'}:
                if not isinstance(spec[key], str):
                    raise DistutilsOptionError('"{}" value must be string, got'
                                               ' {}'.format(key, spec[key]))

            self.deps[spec['name']] = {
                'origin': spec['origin'],
                'version': spec['version']
            }
            seen_deps.add(python_dep)

        missing = seen_deps ^ install_requires
        if missing and self.use_pypi_deps:
            for item in missing:
                requirement = Requirement.parse(item)
                distribution = self.package_index.obtain(requirement)
                key = 'py{1}{2}-{0}'.format(distribution.key,
                                            *sys.version_info[:2])
                self.deps[key] = {
                    'origin': 'pypi/py-{}'.format(distribution.key),
                    'version': distribution.version
                }
        elif missing:
            raise DistutilsOptionError('These packages are listed in install'
                                       ' requirements, but not in bdist_pkg'
                                       ' requirements mapping: {}'
                                       ''.format(', '.join(missing)))

    def ensure_desc(self, project):
        desc = project.get_long_description()
        desc = desc if desc != 'UNKNOWN' else project.get_description()
        desc = self.cut_changelog(desc)
        self.ensure_string('desc', desc)

    def ensure_name(self, project):
        name = project.get_name()
        if self.with_py_prefix:
            name = 'py{}{}-{}'.format(sys.version_info[0], sys.version_info[1],
                                      name)
        self.ensure_string('name', name)

    def ensure_options(self):
        provided_options = set(self.distribution.extras_require or {})
        self.selected_options = set(self.selected_options or [])
        unknown_options = self.selected_options - provided_options
        if not unknown_options:
            self.options = {
                option: option in self.selected_options
                for option in provided_options
            }
        else:
            raise DistutilsOptionError('Unknown extras selected: {}'
                                       ''.format(', '.join(unknown_options)))

    def ensure_scripts(self):
        if self.scripts is None:
            return
        if not isinstance(self.scripts, dict):
            raise DistutilsOptionError('scripts must be a dict, got {}'
                                       ''.format(self.scripts))
        valid_keys = {
            'pre-install',
            'post-install',
            'install',
            'pre-deinstall',
            'post-deinstall',
            'deinstall',
            'pre-upgrade',
            'post-upgrade',
            'upgrade',
        }
        bad_keys = [key for key in self.scripts if key not in valid_keys]
        if bad_keys:
            raise DistutilsOptionError('invalid scripts: {}'
                                       ''.format(', '.join(bad_keys)))
        bad_keys = [
            key for key, value in self.scripts.items()
            if not isinstance(value, str)
        ]
        if bad_keys:
            raise DistutilsOptionError('invalid scripts: {}'
                                       ''.format(', '.join(bad_keys)))

    def iter_install_files(self):
        for root, dirs, files in os.walk(self.install_dir):
            for file in files:
                reldir = os.path.relpath(root, self.install_dir)
                install_path = '/' + os.path.join(reldir, file)
                install_path = install_path.replace(self.prefix + '/lib64/',
                                                    self.prefix + '/lib/')
                yield os.path.join(root, file), install_path

    def maybe_remove_temp(self, path):
        if self.keep_temp:
            return
        if path is None:
            return
        if os.path.exists(path):
            shutil.rmtree(path)

    def maybe_rename_console_scripts(self, project):
        if not self.with_py_prefix:
            return
        if not project.entry_points:
            return
        console_scripts = project.entry_points.get('console_scripts')
        if console_scripts is None:
            return
        prefixed_console_scripts = []
        for script in console_scripts:
            name, callback = script.split('=')
            name = '{}{}.{}'.format(name.strip(), *sys.version_info[:2])
            prefixed_console_scripts.append('{} = {}'.format(
                name, callback.strip()))
        project.entry_points['console_scripts'] = prefixed_console_scripts

    def cut_changelog(self, desc):
        def match_changelog_header(line):
            words = re.findall(r'\b\w+\b', line.lower())
            if len(words) != 1:
                return True
            if 'changelog' in words or 'changes' in words:
                return False
            return True

        return '\n'.join(takewhile(match_changelog_header, desc.splitlines()))
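The changelog cut above keeps the long description only up to the first line whose single word is "changelog" or "changes". Below is a minimal standalone sketch of the same idea outside the command class; the helper name strip_changelog is made up for illustration.

import re
from itertools import takewhile

def strip_changelog(desc):
    # Keep lines until we hit a line whose only word is "changelog" or "changes".
    # (strip_changelog is a hypothetical name, not part of bdist_pkg.)
    def not_changelog_header(line):
        words = re.findall(r'\b\w+\b', line.lower())
        return not (len(words) == 1 and words[0] in ('changelog', 'changes'))
    return '\n'.join(takewhile(not_changelog_header, desc.splitlines()))

print(strip_changelog('My package\n\nDoes things.\n\nChangelog\n0.1 - first'))
# prints everything up to, but not including, the "Changelog" header line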
Example #44
0
class LocalPyPi(HTTPTestServer):
    """ Abstract class for creating a working dir and virtualenv,
        setting up a PyPi instance in a thread,
        and providing an api accessor.
    """
    username = '******'
    password = '******'
    hostname = '127.0.0.1'
    package_index = PackageIndex()

    def __init__(self, target_python=None, **kwargs):
        self.target_python = target_python or sys.executable
        super(LocalPyPi, self).__init__(**kwargs)

    def pre_setup(self):
        self.env = dict(os.environ)
        if "PYTHONPATH" in self.env:
            del self.env["PYTHONPATH"]

        existing_path = self.env.get("PATH")
        self.env["PATH"] = os.path.dirname(self.python)
        if existing_path:
            self.env["PATH"] = self.env["PATH"] + os.path.pathsep + existing_path

    def get_rc(self):
        """ return a ConfigParser rc for this instance
        """
        config = configparser.ConfigParser()
        config.add_section('server-login')
        config.set('server-login', 'repository', self.uri)
        config.set('server-login', 'username', self.username)
        config.set('server-login', 'password', self.password)
        return config

    def build_egg_from_source(self, pkg_location, output_to, python):
        try:
            temp = tempfile.mkdtemp()
            self.run(('%s setup.py bdist_egg --dist-dir=' + temp) % python,
                     cd=pkg_location, capture=True)
            files = os.listdir(temp)
            if len(files) != 1:
                raise RuntimeError("Error while generating egg file for: %s" % pkg_location)
            egg_file = os.path.join(temp, files[0])
            shutil.move(egg_file, output_to)
            return os.path.join(output_to, os.path.basename(egg_file))
        finally:
            shutil.rmtree(temp, ignore_errors=True)

    def create_egg_for_package(self, pkg_location, output_to, python):
        assert os.path.isdir(pkg_location)

        if pkg_location.endswith(".egg"):
            return rezip_egg(pkg_location, output_to)
        else:
            return self.build_egg_from_source(pkg_location, output_to, python)

    def upload_requirement(self, work_dir, req, python):
        dest_dir = self.get_file_dir(req.project_name).strip()
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        def fetch_requirement(req, dest_dir, force_download):
            from setuptools.package_index import PackageIndex  # @Reimport
            from pkg_resources import working_set  # @Reimport  # NOQA
            i = PackageIndex()
            if force_download:
                [i.remove(i[req.key][0]) for _ in xrange(len(i[req.key]))]
                d = i.download(req, dest_dir)
            else:
                d = i.fetch_distribution(req, dest_dir, force_scan=True)
            d = getattr(d, 'location', d) if d else ''
            return (d if d else working_set.resolve([req])[0].location)
        with set_env(COVERAGE_DISABLE_WARNINGS="1"):
            fetched = run_in_subprocess(fetch_requirement, python=python, cd=self.workspace
                                        )(req, dest_dir, force_download=False)

            if not fetched or not os.path.exists(fetched):
                err_msg = "Unable to find requirement: %r\n%s" % (str(req), fetched)
                raise RuntimeError(err_msg)

            if os.path.isdir(fetched):
                fetched = self.create_egg_for_package(fetched, work_dir, python)

        print("Fetched %r" % fetched)
        return fetched

    def post_setup(self):
        """ Upload the dependencies for pkglib so dependent tools
            can bootstrap themselves as well as run tests on
            generated packages
        """
        self.bootstrap_tagup(python=self.target_python)

    def bootstrap_tagup(self, python):
        work_dir = os.path.join(self.workspace, 'pkglib-deps')
        if not os.path.exists(work_dir):
            os.makedirs(work_dir)
        with open(os.path.join(work_dir, '.pypirc'), 'wt') as rc_file:
            self.get_rc().write(rc_file)

        # XXX find a better way to pass in credentials
        new_env = copy.copy(dict(os.environ))
        new_env['HOME'] = work_dir

        if "PYTHONPATH" in new_env:
            del new_env["PYTHONPATH"]

        def get_pkglib_reqs():
            from pkglib.setuptools.dependency import get_all_requirements
            return [(dist.project_name, dist.version)
                    for dist in get_all_requirements(['pkglib', 'pytest', 'pytest-cov'], ignore_explicit_builtins=True)
                    if dist.project_name not in ['virtualenv', 'setuptools']]
        for name, version in run_in_subprocess(get_pkglib_reqs, python=python, cd=self.workspace)():
            # Quick hack to get the built eggs into the test PyPi instance.
            # We register with an empty package file then copy the files in manually
            # We may need pip and distribute if virtualenv installed old versions.
            # (should only occur when upgrading to new virtualenv).
            with open(os.path.join(work_dir, 'setup.py'), 'wb') as fp:
                setup_py = SETUP_TMPL % {'name': name, 'version': version}
                fp.write(setup_py.encode('utf-8'))

            cmd = 'cd %s; %s setup.py register' % (work_dir, python)
            out = self.run(cmd, capture=True, env=new_env)

            logger.debug(out)
            assert '200' in out
            self.upload_requirement(work_dir, Requirement.parse('%s==%s' % (name, version)), python)
Example #45
0
class InstallGcloudCommand(Command):
    """ Install Google Cloud SDK"""
    def initialize_options(self):
        self.win_gcloud_url = None
        self.win_gcloud_installer = None
        self.nix_gcloud_url = None
        self.silent = None
        self.curl = None
        self.bash = None
        self.package_index = None

    def finalize_options(self):

        if platform.system() != "Windows":
            self.curl = which('curl')
            self.bash = which('bash')
            self.gcloud_url = "https://sdk.cloud.google.com"
            self.silent = "--disable-prompts"
        else:
            self.silent = "/S"
            self.gcloud_url = "https://dl.google.com/dl/cloudsdk/channels/" \
                              "rapid/GoogleCloudSDKInstaller.exe"
        self.package_index = PackageIndex()

    # Copied from setuptools.command.easy_install.easy_install
    @contextlib.contextmanager
    def _tmpdir(self):
        tmpdir = tempfile.mkdtemp(prefix=six.u("install_gcloud-"))
        try:
            # cast to str as workaround for #709 and #710 and #712
            yield str(tmpdir)
        finally:
            os.path.exists(tmpdir) and rmtree(rmtree_safe(tmpdir))

    def run(self):
        warn_msg = "Please install the Google Cloud SDK manually:\n\t" \
                   "https://cloud.google.com/sdk/downloads"
        if platform.system() == "Windows":
            with self._tmpdir() as tmpdir:
                gcloud_install_cmd = \
                       self.package_index.download(self.gcloud_url, tmpdir)
                try:
                    output = subprocess.check_output([gcloud_install_cmd,
                                                      self.silent],
                                                     stderr=subprocess.STDOUT)
                    log.info(output.decode('utf-8'))
                except subprocess.CalledProcessError as cpe:
                    log.warn(u' '.join(cpe.cmd) + u":\n\t" +
                             cpe.output.decode('utf-8'))

        elif self.curl is not None and self.bash is not None:
            try:
                script = subprocess.check_output([self.curl, "-s", "-S",
                                                  self.gcloud_url],
                                                 stderr=subprocess.STDOUT)
                if script:
                    with self._tmpdir() as tmpdir:
                        gcloud_install_cmd = os.path.join(tmpdir,
                                                          'installer.sh')
                        with open(gcloud_install_cmd, 'w') as gcloud_install_fd:
                            gcloud_install_fd.write(script.decode('utf-8'))
                        output = subprocess.check_output([self.bash,
                                                          gcloud_install_cmd,
                                                          self.silent],
                                                         stderr=subprocess.STDOUT)
                        log.info(output.decode('utf-8'))
                else:
                    log.warn("Unable to download installer script. " + warn_msg)
            except subprocess.CalledProcessError as cpe:
                log.warn(u' '.join(cpe.cmd) + u":\n\t" +
                         cpe.output.decode('utf-8'))
        else:
            log.warn("Unable to find curl and/or bash. " + warn_msg)

    def get_inputs(self):
        return []

    def get_outputs(self):
        return []
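For context, a custom command like this is normally exposed through cmdclass in setup.py so that `python setup.py install_gcloud` becomes available. A hedged sketch follows; the project name, version, and command alias are illustrative only and not taken from the original project.

from setuptools import setup

setup(
    name='example-project',   # illustrative name, not from the original project
    version='0.0.0',          # illustrative version
    cmdclass={'install_gcloud': InstallGcloudCommand},
)
# After this registration, `python setup.py install_gcloud` runs the command above.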
Example #46
0
File: web.py Project: Malex/pyg
 def __init__(self, *a, **k):
     PackageIndex.__init__(self, *a, **k)
     self.urls = set()
Example #47
0
File: utils.py Project: openalea/PkgIt
        formula_name_cap = formula_name[0].capitalize() + formula_name[1:]
        cmd_import = "from pkgit.formulas.%s import %s" % (formula_name, formula_name_cap)
        exec(cmd_import, globals(), locals())
    except ImportError:
        print()
        print("Cannot find formula %s. Maybe it is misspelled" % formula_name)
        print("You can only use formulas: %s" % str(formulas())[1:-1])
        print()
        raise
    # instantiate formula
    cmd_instanciate = "%s()" % formula_name_cap
    return eval(cmd_instanciate)

__oldsyspath__ = sys.path[:]

'''
OPENALEA_PI = "http://openalea.gforge.inria.fr/pi"
OPENALEA_REPOLIST = "http://openalea.gforge.inria.fr/repolist"
def get_repo_list():
    """ Return the list of OpenAlea repository """
    import urllib
    try:
        ret = []
        u = urllib.urlopen(OPENALEA_REPOLIST)
        for i in u:
            ret.append(i.strip())
        return ret

    except Exception, e:
        print e
        return [OPENALEA_PI]
Example #48
0
                    , 'Flask-OpenID>=1.0.1'
                    , 'webassets>=0.7.1'
                    ]

import os, shutil, sys
from pkg_resources import Requirement
from setuptools.package_index import PackageIndex

def die(message):
    sys.stderr.write(message)
    sys.exit(1)

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print "Please supply a destination directory for the fetched packages!"
        sys.exit(1)
    dest_dir = os.path.abspath(sys.argv[1])

    print "Fetching packages:"
    pkg_index = PackageIndex()
    with open(os.path.join(dest_dir, 'install_order'), 'w') as fd:
        for pkg in required_packages:
            print "(---  Processing requirement '{0}'".format(pkg)
            dist = pkg_index.fetch_distribution(Requirement.parse(pkg),
                                                dest_dir, True, True)
            if dist is None:
                die("Couldn't find package satisfying '{0}'!".format(pkg))
            print " ---) Fetched {0} {1}".format(dist.project_name,
                    dist.version)
            fd.write(os.path.basename(dist.location) + '\n')
Example #49
0
 def __init__(self, index_url):
   PackageIndex.__init__(self, index_url)
   self.platform = None
   # It is necessary to blow away local caches in order to not pick up stuff in site-packages.
   self._distmap = {}
   self._cache = {}
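A hedged usage sketch of an index initialized this way follows. The enclosing class name CleanIndex and the index URL are illustrative only, the private attributes (_distmap, _cache) are internals of pkg_resources/setuptools that may change between releases, and obtain() performs a network scan, so treat this as a sketch rather than a tested recipe.

from pkg_resources import Requirement
from setuptools.package_index import PackageIndex

class CleanIndex(PackageIndex):
    # Hypothetical wrapper: same idea as the __init__ above, starting from an
    # empty local state so nothing in site-packages is picked up.
    def __init__(self, index_url):
        PackageIndex.__init__(self, index_url)
        self.platform = None
        self._distmap = {}
        self._cache = {}

index = CleanIndex('https://pypi.org/simple/')          # illustrative index URL
dist = index.obtain(Requirement.parse('requests'))      # scans the index for a match
if dist is not None:
    print(dist.project_name, dist.version)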