Example #1
def find_install_requires():
    """Return a list of dependencies and non-pypi dependency links.

    A supported version of tensorflow and/or tensorflow-gpu is required. If not
    found, then tensorflow is added to the install_requires list.

    Depending on the version of tensorflow found or installed, either
    keras-contrib or tensorflow-addons needs to be installed as well.
    """

    install_requires = [
        'sktime==0.4.1',
        'h5py>=2.8.0',
        'matplotlib',
        'seaborn',
    ]

    # tensorflow version requirements
    # by default, make sure anything already installed is at least 1.9.0,
    # or, if installing fresh, get the most recent stable (i.e. not
    # nightly) version
    MINIMUM_TF_VERSION = '1.9.0'
    tf_requires = 'tensorflow>=' + MINIMUM_TF_VERSION

    has_tf_gpu = False
    has_tf = False
    tf = working_set.find(Requirement.parse('tensorflow'))
    tf_gpu = working_set.find(Requirement.parse('tensorflow-gpu'))

    if tf is not None:
        has_tf = True
        tf_version = tf.version  # Distribution.version is the public API

    if tf_gpu is not None:
        has_tf_gpu = True
        tf_gpu_version = tf_gpu.version

    if has_tf_gpu and not has_tf:
        # have -gpu only (1.x), make sure it's at least 1.9.0
        # Specify tensorflow-gpu version if it is already installed.
        tf_requires = 'tensorflow-gpu>=' + MINIMUM_TF_VERSION

    install_requires.append(tf_requires)

    # tensorflow itself handled, now find out what add-on package to use.
    # Compare parsed versions rather than raw strings: plain string
    # comparison of version numbers is lexicographic and unreliable
    # (parse_version also comes from pkg_resources)
    if (not has_tf and not has_tf_gpu) or (
            has_tf and parse_version(tf_version) >= parse_version('2.1.0')):
        # tensorflow will be up-to-date enough to use most recent
        # tensorflow-addons, the replacement for keras-contrib
        install_requires.append('tensorflow-addons')
    else:
        # fall back to keras-contrib, which is not on PyPI, so it has to be
        # installed separately. TODO
        print(
            'Existing version of tensorflow older than version 2.1.0 '
            'detected. You will need to install keras-contrib (for tf.keras) '
            'in order to use all the features of sktime-dl. '
            'See https://github.com/keras-team/keras-contrib#install-keras_contrib-for-tensorflowkeras'
        )

    return install_requires
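
The core pattern here, probing the working set instead of calling require(), is worth isolating. A minimal sketch of how working_set.find behaves, assuming only pkg_resources; the package name 'numpy' is just a placeholder:

from pkg_resources import working_set, Requirement

# find() returns a Distribution when the requirement is satisfied by an
# installed (activated) distribution, and None when nothing matches.
# Unlike require(), it does not raise DistributionNotFound, although it
# can still raise VersionConflict when a version specifier conflicts
# with what is installed.
dist = working_set.find(Requirement.parse('numpy'))  # placeholder name
if dist is not None:
    print(dist.project_name, dist.version, dist.location)
else:
    print('not installed')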
Example #2
def find_dependencies(dist):
    dependencies = [working_set.find(i) for i in dist.requires()]
    my_requirements = dist.requires()
    # we want the subset of stuff in the basket we actually depend on, not just the basket itself
    basket_requirements = [i for i in my_requirements
                           if i.extras]
    for basket in basket_requirements:
        dependencies.extend([working_set.find(Requirement.parse(i)) for i in basket.extras])
    return dependencies
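
Note that basket.extras yields extra names (the bracketed part of a requirement), not project names, so round-tripping them through Requirement.parse only works when each extra happens to share its name with a project. A small illustration of what .extras contains; the requirement string is hypothetical:

from pkg_resources import Requirement

req = Requirement.parse('reahl-web[elixir,test]')  # hypothetical requirement
print(req.project_name)  # reahl-web
print(req.extras)        # ('elixir', 'test')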
Example #3
File: eggs.py Project: diopib/reahl
    def read_from_distributions(self, distributions):
        graph = {}
        for dist in distributions:
            dependencies = [working_set.find(i) for i in dist.requires()]
            my_requirements = dist.requires()
            #we want the subset of stuff in the basket we actually depend on, not just the basket itself
            basket_requirements = [i for i in my_requirements if i.extras]
            for basket in basket_requirements:
                dependencies.extend([
                    working_set.find(Requirement.parse(i))
                    for i in basket.extras
                ])

            graph[dist] = dependencies
        return graph
Example #4
    def provide_dependencies(self):
        """Add paths for resolution from inner buildout's Environment.

        These paths tend to vary according to the way anybox.recipe.odoo has
        been installed, notably there's a big difference between issuing in a
        virtualenv ``python setup.py develop`` and
        ``python setup.py develop --upgrade``, the latter putting the
        dependencies into separate egg directories below the virtualenv's
        ``site-packages`` while the former installs straight into
        ``site-packages``.

        For now we monkey-patch the path lists that ``zc.buildout`` uses
        to provide setuptools in all cases, based on a list of all needed
        dependencies, meaning that this list will have to be maintained.

        Alternatives to consider :

        * inject the whole current ``sys.path``
        * play with the sub-buildout's ``find-links`` options

        The monkey-patching in itself is tolerable in that the main purpose of
        these integration tests is to report about the impact of internal
        changes to zc.buildout anyway.
        """

        autopath = buildout_and_setuptools_path
        self.autopath_original = autopath[:]
        forward_projects = [
            'pip', 'zc.buildout', 'zc.recipe.egg', 'anybox.recipe.odoo'
        ]
        if sys.version_info < (2, 7):
            forward_projects.extend(('argparse', 'ordereddict'))
        autopath.extend(
            working_set.find(Requirement.parse(project)).location
            for project in forward_projects)
Example #5
    def test_load(self):
        """
        Verify that a plugin can be loaded successfully through a pkg_resources
        entry point.
        """
        logging.debug('')
        logging.debug('test_load')

        # make sure we're looking in the right spot for the plugins whether
        # we're in a develop egg or in the released version
        dist = working_set.find(Requirement.parse('openmdao.test'))
        fact = PkgResourcesFactory(['openmdao.component'],
                                   [os.path.join(dist.location,
                                                 'openmdao',
                                                 'test','plugins')],
                                   )
        
        comp = fact.create('testplugins.components.dumb.DumbComponent')
        logging.debug('    loaders:')
        for key, value in fact._loaders.items():
            logging.debug('        %s:', key)
            for val in value:
                logging.debug('                name: %s', val.name)
                logging.debug('               group: %s', val.group)
                logging.debug('                dist: %s', val.dist)
                logging.debug('            entry_pt: %s', val.entry_pt)
                logging.debug('                ctor: %s', val.ctor)
                logging.debug('')

        self.assertEqual(comp.svar,'abcdefg')
        comp.run()
        self.assertEqual(comp.svar,'gfedcba')
Example #6
    def provide_dependencies(self):
        """Add paths for resolution from inner buildout's Environment.

        These paths tend to vary according to the way anybox.recipe.odoo has
        been installed, notably there's a big difference between issuing in a
        virtualenv ``python setup.py develop`` and
        ``python setup.py develop --upgrade``, the latter putting the
        dependencies into separate egg directories below the virtualenv's
        ``site-packages`` while the former installs straight into
        ``site-packages``.

        For now we monkey-patch the path lists that ``zc.buildout`` uses
        to provide setuptools in all cases, based on a list of all needed
        dependencies, meaning that this list will have to be maintained.

        Alternatives to consider :

        * inject the whole current ``sys.path``
        * play with the sub-buildout's ``find-links`` options

        The monkey-patching in itself is tolerable in that the main purpose of
        these integration tests is to report about the impact of internal
        changes to zc.buildout anyway.
        """

        autopath = buildout_and_setuptools_path
        self.autopath_original = autopath[:]
        forward_projects = ['pip', 'zc.buildout', 'zc.recipe.egg',
                            'anybox.recipe.odoo']
        if sys.version_info < (2, 7):
            forward_projects.extend(('argparse', 'ordereddict'))
        autopath.extend(working_set.find(Requirement.parse(project)).location
                        for project in forward_projects)
Example #7
    def test_get_loaders(self):
        """test retrieval of loaders"""
        # Get a list of entry point loaders for the openmdao.dumbplugins 
        # group.       
        dist = working_set.find(Requirement.parse('openmdao.test'))
        fact = PkgResourcesFactory(['openmdao.dumbplugins'],
                                   [os.path.join(dist.location,
                                                 'openmdao',
                                                 'test','plugins')])
        # first, look for active loaders. list should be empty
        dumb_loaders = fact.get_loaders('openmdao.dumbplugins')
        self.assertEqual(len(dumb_loaders), 0)
        
        # now, get all of the loaders, including inactive ones
        dumb_loaders = fact.get_loaders('openmdao.dumbplugins', active=False)
        self.assertEqual(len(dumb_loaders), 6)
        self.assertEqual(dumb_loaders[0].name, 'bar.Comp1Plugin')
        self.assertEqual(dumb_loaders[0].dist.version, '1.0')
        self.assertEqual(dumb_loaders[0].dist.project_name, 'bar')
        self.assertEqual(dumb_loaders[0].ctor, None)

        # now, create a plugin object, which will make its loader active
        comp = fact.create('bar.Comp1Plugin')
        self.assertEqual(comp.version, '1.0')
        
        # now there should be one active loader (its ctor should not be None)
        dumb_loaders = fact.get_loaders('openmdao.dumbplugins')
        self.assertEqual(len(dumb_loaders), 1)
        self.assertEqual(dumb_loaders[0].name, 'bar.Comp1Plugin')
        self.assertEqual(dumb_loaders[0].dist.version, '1.0')
        self.assertEqual(dumb_loaders[0].dist.project_name, 'bar')
        mybar = dumb_loaders[0].create(None)
        self.assertEqual(mybar.version, '1.0')
Example #8
    def __call__(self, parser, namespace, values, option_string=None):
        rt_pkg_name = getattr(parser, ATTR_ROOT_PKG, None)
        results = []
        if rt_pkg_name:
            # We can use this directly as nothing else should be cached
            # where this is typically invoked.
            # XXX actually, if the argparser is actually dumb and won't
            # do exiting on its own with its other _default_ Actions
            # I could just return this as a flag and then let the caller
            # (i.e. the run time) figure this information out and do the
            # appropriate output...
            dist = default_working_set.find(Requirement.parse(rt_pkg_name))
            results.append('%s %s from %s' % self.get_dist_info(dist))
            results.append(os.linesep)

        rt_dist = getattr(parser, ATTR_RT_DIST, None)
        if rt_dist:
            results.append(parser.prog +
                           ': %s %s from %s' % self.get_dist_info(rt_dist))
            results.append(os.linesep)

        if not results:
            results = ['no package information available.']
        # I'd rather return the results than just exiting outright, but
        # remember the bugs that will make an error happen otherwise...
        # quit early so they don't bug.
        for i in results:
            sys.stdout.write(i)
        sys.exit(0)
Example #9
def get_cache_dir():
    # path = os.path.abspath(sys.modules[KB.__module__].__file__)
    distrib = working_set.find(Requirement.parse('zincbase'))
    path = distrib.location + '/zincbase/.cache/'
    # path = '/'.join(path.split('/')[:-1]) + '/.cache/'
    if not os.path.isdir(path):
        os.mkdir(path)
    return path
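
Two fragile spots in this example are the '/'-based string concatenation and the single-level os.mkdir, which fails when the parent directory is missing. A more defensive sketch of the same idea, keeping the 'zincbase' name from the example:

import os
from pkg_resources import working_set, Requirement

def get_cache_dir():
    distrib = working_set.find(Requirement.parse('zincbase'))
    if distrib is None:
        raise RuntimeError('zincbase does not appear to be installed')
    # os.path.join is portable; makedirs creates intermediate directories too
    path = os.path.join(distrib.location, 'zincbase', '.cache')
    os.makedirs(path, exist_ok=True)
    return path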
Example #10
def eval_pcap(pcap, labels, time_const, label=None, rnn_size=100, model_path='/models/OneLayerModel.pkl', model_type='RandomForest'):
    logger = logging.getLogger(__name__)
    try:
        if 'LOG_LEVEL' in os.environ and os.environ['LOG_LEVEL'] != '':
            logger.setLevel(os.environ['LOG_LEVEL'])
    except Exception as e:
        logger.error(
            'Unable to set logging level because: {0} defaulting to INFO.'.format(str(e)))
    data = create_dataset(pcap, time_const, label=label,
                          model_path=model_path, model_type=model_type)
    # Create an iterator
    iterator = BatchIterator(
        data,
        labels,
        perturb_types=['random data']
    )
    logger.debug('Created iterator')
    rnnmodel = SoSModel(rnn_size=rnn_size, label_size=len(labels))
    logger.debug('Created model')
    rnnmodel.load(os.path.join(working_set.find(Requirement.parse(
        'poseidonml')).location, 'poseidonml/models/SoSmodel'))
    logger.debug('Loaded model')

    X_list = iterator.X
    L_list = iterator.L
    sessions = iterator.sessions

    num_total = 0
    max_score = 0
    scores = {}
    for i, X in enumerate(X_list):
        L = L_list[i]
        out = rnnmodel.get_output(
            np.expand_dims(X, axis=0),
            np.expand_dims(L, axis=0),
        )
        for j, o in enumerate(out):
            for k, s in enumerate(o):
                num_total += 1
                session = sessions[i][k]['session info']
                p = session['protocol']
                if p == '06':
                    p = 'TCP'
                if p == '17':
                    p = 'UDP'
                if p == '01':
                    p = 'ICMP'
                flowlike = p + ' '
                if session['initiated by source']:
                    flowlike += session['source']+' to '+session['destination']
                else:
                    flowlike += session['destination']+' to '+session['source']
                scores[num_total] = str(s)
                if s > max_score:
                    max_score = s

    logger.info(max_score)
    return max_score
Example #11
    def load_sdk_version(self):
        if self.product_version:
            # Product version already set, do nothing
            return
        # TODO: Even if multiple SDKs are loaded, version is collected only
        # from the vsphere client bindings wheel/distribution. Might need to
        # support multiple SDKs in the future
        sdk_dist = working_set.find(
            Requirement.parse(self.VSHPERE_SDK_DIST_NAME))
        if sdk_dist:
            self.set_product_info('SDK', sdk_dist.version)
Example #12
def get_version_using_pkgresources(module  # type: ModuleType
                                   ):
    # type: (...) -> str
    """
    Gets the version from the package info using `pkg_resources` (from `setuptools`)
    This works even for packages installed with `pip install -e`

    Note: this is probably PEP345 https://www.python.org/dev/peps/pep-0345/

    In case there is an old local `.egg-info` in the package folder, this method may return the wrong version
    number. For this reason an error is raised in that case.

    In case the location of the found distribution is not the same as the location of the package, an error is also raised.

    :param module: the module whose installed version should be looked up
    :return: the version string
    """
    # this is part of setuptools
    from pkg_resources import working_set, Requirement  # get_distribution require, Distribution

    # First get the distribution

    # NO WAY ! `require` will fail in case of locally installed dependencies version conflict, which happens often
    # pkg_dist = require(pkg.__name__)[0]

    # WORKS BUT SLOW WHEN NOT FOUND because it ends up calling 'require'
    # pkg_dist = get_distribution(module.__name__)  # module.__name

    # MUCH FASTER !!! because in case of failure it does not try to do a 'require'
    pkg_dist = working_set.find(Requirement.parse(module.__name__))

    # DOES NOT WORK
    # pkg_dist = get_provider(module.__name__)

    # DOES NOT WORK
    # pkg_dist = Distribution.from_filename(module.__file__)

    if pkg_dist is not None:
        if Path(pkg_dist.location).resolve() != Path(
                join(dirname(module.__file__), pardir)).resolve():
            raise Exception(
                "Another distribution of the same package (with version '%s') is installed, but is not the "
                "one that was imported" % pkg_dist.version)

        # PROTECTION: if there is an old egg-info in the folder, the version will be that one, even if not installed!
        if exists(join(pkg_dist.location, module.__name__ + ".egg-info")):
            raise Exception(
                "There is a '%s' folder in the package location so it seems to be a source project "
                "that is not pip-installed. pkg_resources will therefore be ignored "
                "to find the version" % (module.__name__ + ".egg-info"))

        # Finally return the version number
        return pkg_dist.version
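
Assuming the function above is importable, usage looks like this; requests is only a placeholder for some pip-installed package:

import requests  # placeholder: any pip-installed package

print(get_version_using_pkgresources(requests))  # e.g. '2.31.0'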
Example #13
    def _check_deps(self, metadata):
        plugins = {k.__name__.replace('_', '-'): Distribution(
            os.path.dirname(k.__file__), None, k.__name__, k.__version__,
            None, SOURCE_DIST) for k in veles.__plugins__}
        failed_general = set()
        failed_veles = set()
        for rreq in metadata["requires"]:
            req = Requirement.parse(rreq)
            if req.project_name in plugins:
                if plugins[req.project_name] not in req:
                    failed_veles.add(req)
            else:
                try:
                    working_set.find(req).project_name
                except (AttributeError, VersionConflict):
                    failed_general.add(req)
        if len(failed_general) > 0:
            print("Unsatisfied package requirements:", file=sys.stderr)
            print(", ".join((str(f) for f in failed_general)), file=sys.stderr)
        if len(failed_veles):
            print("Unsatisfied VELES requirements:", file=sys.stderr)
            print(", ".join((str(f) for f in failed_veles)), file=sys.stderr)
Example #14
    def test_load(self):
        # make sure we're looking in the right spot for the plugins whether
        # we're in a develop egg or in the released version
        dist = working_set.find(Requirement.parse('openmdao.test'))
        fact = PkgResourcesFactory(['openmdao.component'], None)

        comp = fact.create('openmdao.test.execcomp.ExecComp',
                           exprs=['x = a+1', 'y=b-2', 'z=x*2'])
        comp.a = 4
        comp.b = 2
        comp.run()
        self.assertEqual(comp.x, 5)
        self.assertEqual(comp.y, 0)
        self.assertEqual(comp.z, 10)
Example #15
    def test_load(self):
        # make sure we're looking in the right spot for the plugins whether
        # we're in a develop egg or in the released version
        dist = working_set.find(Requirement.parse('openmdao.test'))
        fact = PkgResourcesFactory(['openmdao.component'], None)

        comp = fact.create('openmdao.test.execcomp.ExecComp',
                           exprs=['x = a+1', 'y=b-2', 'z=x*2'])
        comp.a = 4
        comp.b = 2
        comp.run()
        self.assertEqual(comp.x, 5)
        self.assertEqual(comp.y, 0)
        self.assertEqual(comp.z, 10)
Example #16
    def build(self):
        if not self.vapi_version:
            vapi_runtime_dist = working_set.find(
                Requirement.parse('vapi-runtime'))
            self.vapi_version = vapi_runtime_dist.version if vapi_runtime_dist else ''
        python_version = platform.python_version()
        # platform.uname() returns (system, node, release, version, machine, processor)
        (os_name, _, os_version, _, os_arch, _) = platform.uname()

        self.user_agent = "vAPI/%s Python/%s (%s; %s; %s)" % (
            self.vapi_version, python_version, os_name, os_version, os_arch)
        if self.product_version:
            self.user_agent = "%s %s" % (self.product_version, self.user_agent)

        return self.user_agent.strip()
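
For reference, a sketch of the string this produces, with a hypothetical vapi-runtime version; the real value comes from working_set.find(Requirement.parse('vapi-runtime')):

import platform

vapi_version = '2.12.0'  # hypothetical
os_name, _, os_version, _, os_arch, _ = platform.uname()
user_agent = "vAPI/%s Python/%s (%s; %s; %s)" % (
    vapi_version, platform.python_version(), os_name, os_version, os_arch)
print(user_agent)  # e.g. vAPI/2.12.0 Python/3.10.4 (Linux; 5.15.0; x86_64)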
Example #17
def getDependencyInfosForDeferred():
    """
    Return dictionary with lists of configuration files
    for the dependencies of the deferred eggs.
    """
    deferred = ['zc.table', 'hurry.workflow']
    # XXX: Assuming that all dependencies should be autoincluded
    # will probably get us into trouble, but let's see how big trouble.
    # *zc.table* is an example of a dependency, whose *configure.zcml*
    # will not run in Plone environment.
    # Some better solution than just a blacklist would be welcome.
    from sauna.reload import reload_paths
    zcml_to_look_for = ('meta.zcml', 'configure.zcml', 'overrides.zcml')
    deps = ZCMLInfo(zcml_to_look_for)
    for ep in iter_entry_points('z3c.autoinclude.plugin'):
        if ep.module_name == DEFERRED_TARGET:
            deferred.append(ep.dist.project_name)
            # XXX: We cannot call DependencyFinder(ep.dist).includableInfo,
            # because it eventually imports also our development packages
            # while resolving existence of *zcml_to_look_for*.
            finder = DependencyFinder(ep.dist)
            info = ZCMLInfo(zcml_to_look_for)
            for req in finder.context.requires():
                # Skip missing and deferred requirements
                dist = ws.find(req)  # find req from the current working set
                if dist is None or dist.location in reload_paths:
                    continue
                # Resolve ZCMLs to be loaded for the other requirements
                dist_manager = DistributionManager(get_provider(req))
                for dotted_name in dist_manager.dottedNames():
                    try:
                        module = resolve(dotted_name)
                    except ImportError:
                        continue
                    for candidate in zcml_to_look_for:
                        try:
                            candidate_path = os.path.join(
                                os.path.dirname(module.__file__), candidate)
                        except AttributeError:
                            continue
                        if os.path.isfile(candidate_path):
                            info[candidate].append(dotted_name)
            for key in deps:
                deps[key].extend(info.get(key, []))
    for key in deps:
        deps[key] = set([n for n in deps[key] if n not in deferred])
    return deps
Example #18
    def register(self, widget, modname, variant_mapping):
        if isinstance(modname, Requirement):
            modname = os.path.basename(working_set.find(modname).location)

        if isinstance(variant_mapping, basestring):
            variant_mapping = {self.DEFAULT_VARIANT : variant_mapping}

        # alternatively, we might consider creating
        # a DEFAULT_VARIANT-entry based on some rule
        assert self.DEFAULT_VARIANT in variant_mapping, "You **must** have %s as part of a varianted resource filename! This is given: %r" % (self.DEFAULT_VARIANT, variant_mapping)

        url_mapping = {}
        self._lock.acquire()
        # this assumes that *no* widgets
        # are created dynamically!
        # A constraint I think that's
        # imposable, but we might think of
        # adding a "dynamic"-parameter
        # or some such that prevents inclusion
        # here (it's of no use anyway)
        try:
            if widget not in self._widgets:
                self._widgets.append(widget)
        finally:
            self._lock.release()
        for variant, filename in variant_mapping.iteritems():
            filename = variant_mapping[variant]
            basename = os.path.basename(filename)
            dirname = os.path.dirname(filename)
            parts = ['', modname] + filter(None, dirname.split('/'))
            webdir = '/'.join(parts)

            self._lock.acquire()
            try:
                if not self._is_registered(webdir, dirname, variant):
                    heapq.heappush(self._dirs.setdefault(variant, []), (len(webdir), (webdir, dirname)))
                    log.debug("Registered %s at %s", dirname, webdir)
            finally:
                self._lock.release()
            url_mapping[variant] = '/'.join([self.prefix, webdir.strip('/'), basename])


        url = VariantedUrl(url_mapping)

        return webdir, dirname, url
Example #19
    def register(self, modname, filename):
        if isinstance(modname, Requirement):
            modname = os.path.basename(working_set.find(modname).location)

        basename = os.path.basename(filename)
        dirname = os.path.dirname(filename)
        parts = ['', modname] + filter(None, dirname.split('/'))
        webdir = '/'.join(parts)

        self._lock.acquire()
        try:
            if not self._is_registered(webdir, dirname):
                heapq.heappush(self._dirs, (len(webdir), (webdir, dirname)))
                log.debug("Regsitered %s at %s", dirname, webdir)
        finally:
            self._lock.release()
        url = '/'.join([self.prefix, webdir.strip('/'), basename])
        return webdir, dirname, url
Example #20
    def add_files(self):
        r"""
        Checks for the flags '--all' and '--sagenb'.

        For each one present, this function adds the appropriate directories and files to the todo list.

        EXAMPLES::

            sage: from sage.doctest.control import DocTestDefaults, DocTestController
            sage: from sage.env import SAGE_SRC
            sage: import os
            sage: log_location = os.path.join(SAGE_TMP, 'control_dt_log.log')
            sage: DD = DocTestDefaults(all=True, logfile=log_location)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting entire Sage library.
            sage: os.path.join(SAGE_SRC, 'sage') in DC.files
            True

        ::

            sage: DD = DocTestDefaults(sagenb = True)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting the Sage notebook.
            sage: DC.files[0][-6:]
            'sagenb'
        """
        opj = os.path.join
        from sage.env import SAGE_SRC, SAGE_DOC_SRC, SAGE_ROOT
        def all_files():
            from glob import glob
            self.files.append(opj(SAGE_SRC, 'sage'))
            self.files.append(SAGE_DOC_SRC)
            self.options.sagenb = True
        if self.options.all:
            self.log("Doctesting entire Sage library.")
            all_files()
        if self.options.sagenb:
            if not self.options.all:
                self.log("Doctesting the Sage notebook.")
            from pkg_resources import Requirement, working_set
            sagenb_loc = working_set.find(Requirement.parse('sagenb')).location
            self.files.append(opj(sagenb_loc, 'sagenb'))
Example #21
    def test_load_version(self):
        """load a specific version, then try to load a conflicting version"""

        dist = working_set.find(Requirement.parse('openmdao.test'))
        fact = PkgResourcesFactory(['openmdao.dumbplugins'],
                                   [os.path.join(dist.location,
                                                 'openmdao', 'test',
                                                 'plugins')])
        foo = fact.create('foo.Comp1Plugin', version='1.0')
        self.assertEqual(foo.version, '1.0')

        # now try to create an object that requires a conflicting version of foo
        self.assertRaises(VersionConflict,
                          fact.create, 'foo.Comp1Plugin',
                          version='1.4')

        # now request a non-existent version of foo
        foo10 = fact.create('foo.Comp1Plugin', version='10.5')
        self.assertEqual(foo10, None)
Example #22
    def test_module_loader_registry_multiple_loaders(self):
        working_set = WorkingSet({
            'calmjs.module': [
                'module4 = calmjs.testing.module4',
            ],
            'calmjs.module.loader': [
                'css = css[style,css]',
                'json = json[json]',
                'empty = empty[]',
            ],
            __name__: [
                'calmjs.module = calmjs.module:ModuleRegistry',
                'calmjs.module.loader = '
                'calmjs.loaderplugin:ModuleLoaderRegistry',
            ]},
            # use a real distribution instead for this case
            dist=root_working_set.find(Requirement.parse('calmjs')),
        )

        registry = ModuleRegistry('calmjs.module', _working_set=working_set)
        loader_registry = ModuleLoaderRegistry(
            'calmjs.module.loader', _working_set=working_set, _parent=registry)
        self.assertEqual({
            'calmjs': ['calmjs.testing.module4'],
        }, loader_registry.package_module_map)

        self.assertEqual(
            ['css', 'empty', 'json'],
            sorted(loader_registry.get_loaders_for_package('calmjs'))
        )

        self.assertEqual([
            'css!calmjs/testing/module4/other.css',
            'css!calmjs/testing/module4/widget.style',
            'json!calmjs/testing/module4/data.json',
        ], sorted(loader_registry.get_records_for_package('calmjs').keys()))

        # was not registered to calmjs.testing
        self.assertEqual([], loader_registry.get_loaders_for_package(
            'calmjs.testing'))
        self.assertEqual({}, loader_registry.get_records_for_package(
            'calmjs.testing'))
Example #23
def getDependencyInfosForDeferred():
    """
    Return dictionary with lists of configuration files
    for the dependencies of the deferred eggs.
    """
    deferred = ['zc.table', 'hurry.workflow']
    # XXX: Assuming that all dependencies should be autoincluded
    # will probably get us into trouble, but let's see how big trouble.
    # *zc.table* is an example of a dependency, whose *configure.zcml*
    # will not run in Plone environment.
    # Some better solution than just a blacklist would be welcome.
    from sauna.reload import reload_paths
    zcml_to_look_for = ('meta.zcml', 'configure.zcml', 'overrides.zcml')
    deps = ZCMLInfo(zcml_to_look_for)
    for ep in iter_entry_points('z3c.autoinclude.plugin'):
        if ep.module_name == DEFERRED_TARGET:
            deferred.append(ep.dist.project_name)
            # XXX: We cannot call DependencyFinder(ep.dist).includableInfo,
            # because it eventually imports also our development packages
            # while resolving existence of *zcml_to_look_for*.
            finder = DependencyFinder(ep.dist)
            info = ZCMLInfo(zcml_to_look_for)
            for req in finder.context.requires():
                # Skip missing and deferred requirements
                dist = ws.find(req)  # find req from the current working set
                if dist is None or dist.location in reload_paths:
                    continue
                # Resolve ZCMLs to be loaded for the other requirements
                dist_manager = DistributionManager(get_provider(req))
                for dotted_name in dist_manager.dottedNames():
                    module = resolve(dotted_name)
                    for candidate in zcml_to_look_for:
                        candidate_path = os.path.join(
                            os.path.dirname(module.__file__), candidate)
                        if os.path.isfile(candidate_path):
                            info[candidate].append(dotted_name)
            for key in deps:
                deps[key].extend(info.get(key, []))
    for key in deps:
        deps[key] = set([n for n in deps[key] if n not in deferred])
    return deps
Example #24
    def __call__(self, parser, namespace, values, option_string=None):
        """
        Invoke to get version.
        """

        # I really do not like this implementation, but under Python 2.7
        # argparser is broken with subcommands and it quits with too few
        # arguments too soon.

        # Related issues:
        # http://bugs.python.org/issue9253#msg186387
        # http://bugs.python.org/issue10424
        rt_pkg_name = getattr(parser, ATTR_ROOT_PKG, None)
        results = []
        if rt_pkg_name:
            # We can use this directly as nothing else should be cached
            # where this is typically invoked.
            # if the argparser is dumber (which makes it smarter) and
            # doesn't have code that randomly call exit on its own with
            # other _default_ Actions it provides, a flag could just
            # simply be marked and/or returned to inform the caller
            # (i.e. the run time) to handle that.
            dist = default_working_set.find(Requirement.parse(rt_pkg_name))
            results.append('%s %s from %s' % self.get_dist_info(dist))
            results.append(linesep)

        infos = getattr(parser, ATTR_INFO, [])
        for info in infos:
            prog, rt_dist = info
            results.append(prog +
                           ': %s %s from %s' % self.get_dist_info(rt_dist))
            results.append(linesep)

        if not results:
            results = ['no package information available.']
        # I'd rather return the results than just exiting outright, but
        # remember the bugs that will make an error happen otherwise...
        # quit early so they don't bug.
        for i in results:
            sys.stdout.write(i)
        sys.exit(0)
Example #25
    def __call__(self, parser, namespace, values, option_string=None):
        """
        Invoke to get version.
        """

        # I really do not like this implementation, but under Python 2.7
        # argparser is broken with subcommands and it quits with too few
        # arguments too soon.

        # Related issues:
        # http://bugs.python.org/issue9253#msg186387
        # http://bugs.python.org/issue10424
        rt_pkg_name = getattr(parser, ATTR_ROOT_PKG, None)
        results = []
        if rt_pkg_name:
            # We can use this directly as nothing else should be cached
            # where this is typically invoked.
            # if the argparser is dumber (which makes it smarter) and
            # doesn't have code that randomly call exit on its own with
            # other _default_ Actions it provides, a flag could just
            # simply be marked and/or returned to inform the caller
            # (i.e. the run time) to handle that.
            dist = default_working_set.find(Requirement.parse(rt_pkg_name))
            results.append('%s %s from %s' % self.get_dist_info(dist))
            results.append(linesep)

        infos = getattr(parser, ATTR_INFO, [])
        for info in infos:
            prog, rt_dist = info
            results.append(
                prog + ': %s %s from %s' % self.get_dist_info(rt_dist))
            results.append(linesep)

        if not results:
            results = ['no package information available.']
        # I'd rather return the results than just exiting outright, but
        # remember the bugs that will make an error happen otherwise...
        # quit early so they don't bug.
        for i in results:
            sys.stdout.write(i)
        sys.exit(0)
Example #26
    def test_module_loader_registry_multiple_loaders(self):
        working_set = WorkingSet(
            {
                'calmjs.module': [
                    'module4 = calmjs.testing.module4',
                ],
                'calmjs.module.webpackloader': [
                    'style!css = css[css]',
                    'json = json[json]',
                ],
                __name__: [
                    'calmjs.module = calmjs.module:ModuleRegistry',
                    'calmjs.module.webpackloader = '
                    'calmjs.webpack.loaderplugin:WebpackModuleLoaderRegistry',
                ]
            },
            # use a real distribution instead for this case
            dist=root_working_set.find(Requirement.parse('calmjs')),
        )

        registry = ModuleRegistry('calmjs.module', _working_set=working_set)
        loader_registry = WebpackModuleLoaderRegistry(
            'calmjs.module.webpackloader',
            _working_set=working_set,
            _parent=registry)
        self.assertEqual({
            'calmjs': ['calmjs.testing.module4'],
        }, loader_registry.package_module_map)

        self.assertEqual(
            ['json', 'style!css'],
            sorted(loader_registry.get_loaders_for_package('calmjs')))

        self.assertEqual([
            WebpackModuleLoaderRegistryKey(
                loader='json', modname='calmjs/testing/module4/data.json'),
            WebpackModuleLoaderRegistryKey(
                loader='style!css',
                modname='calmjs/testing/module4/other.css'),
        ], sorted(loader_registry.get_records_for_package('calmjs').keys()))
Example #27
    def __init__(self, args=None, conf=None):
        """
        Create an application instance.

        :param dict args: preprocessed command line parameters
        """
        self.hook = HookCollection(
            init='madgui.core.app.init')
        self.args = args
        self.conf = conf
        self.dist = working_set.find(Requirement.parse('madgui'))
        self.add_entry_points(self.entry_points)
        # Add all entry point maps (strings like `App.entry_point` above) that
        # are registered under 'madgui.entry_points'. This indirection renders
        # the plugin mechanism more dynamic and allows plugins to be defined
        # more easily by eliminating the need to execute 'setup.py' each time
        # an entrypoint is added, changed or removed. Instead, their setup
        # step only needs to create a single entrypoint which is less likely
        # to change.
        for ep in iter_entry_points('madgui.entry_points'):
            self.add_entry_points(ep.load())
        super(App, self).__init__(redirect=False)
Example #28
def get_deferred_deps_info():
    """Return dictionary with lists of configuration files for the dependencies
    of the deferred eggs"""
    deferred = ["zc.table"]
    # FIXME: Assuming that all dependencies should be autoincluded will
    # probably get us into trouble, but let's see how big trouble. ``zc.table``
    # is an example of a dependency, whose ``configure.zcml`` will not run in
    # Plone environment. Some better solution than just a blacklist would be
    # welcome.
    from sauna.reload import reload_paths
    zcml_to_look_for = ("meta.zcml", "configure.zcml", "overrides.zcml")
    deps = ZCMLInfo(zcml_to_look_for)
    for ep in iter_entry_points("z3c.autoinclude.plugin"):
        if ep.module_name == DEFERRED_TARGET:
            deferred.append(ep.dist.project_name)
            # XXX: We cannot call DependencyFinder(ep.dist).includableInfo,
            # because it eventually imports also our development packages while
            # resolving existence of ``zcml_to_look_for``.
            finder = DependencyFinder(ep.dist)
            info = ZCMLInfo(zcml_to_look_for)
            for req in finder.context.requires():
                # Skip missing and deferred requirements
                dist = ws.find(req)  # find req from the current working set
                if dist is None or dist.location in reload_paths:
                    continue
                # Resolve ZCMLs to be loaded for the other requirements
                dist_manager = DistributionManager(get_provider(req))
                for dotted_name in dist_manager.dottedNames():
                    module = resolve(dotted_name)
                    for candidate in zcml_to_look_for:
                        candidate_path = os.path.join(
                            os.path.dirname(module.__file__), candidate)
                        if os.path.isfile(candidate_path):
                            info[candidate].append(dotted_name)
            for key in deps:
                deps[key].extend(info.get(key, []))
    for key in deps:
        deps[key] = set([n for n in deps[key] if n not in deferred])
    return deps
Example #29
def _pkg_sphinx_info(startdir, pkg, outfile, show_undoc=False,
                    underline='-'):
    """Generate Sphinx autodoc directives for all of the modules in
    the given package.

    """
    # locate the package directory
    topdir = pkg
    pkgdir = pkg

    dist = working_set.find(Requirement.parse(pkg))
    if dist is None:
        logging.error('no dist found for Requirement(%s)' % pkg)
    print >> outfile, 'Package %s' % pkg
    print >> outfile, underline*(len('Package ')+len(pkg))
    print >> outfile, '\n\n'

    __import__(pkg)
    mod = sys.modules[pkg]
    docs = mod.__doc__

    if docs:
        print >> outfile, docs, '\n'

    #excluding traits now since they need to be sorted separately
    #also excluding gui-related files, in case of non-gui build
    _names = list(_get_resource_files(dist,
                                    ['*__init__.py', '*setup.py', '*datatypes*.py',
                                     '*/main/zmq*.py', '*/main/tornado*.py',
                                     '*/gui/*/views.py', '*/gui/*/models.py',
                                     '*/gui/*/urls.py', '*/gui/*/admin.py'],
                                    ['*.py']))
    names = []
    for n in _names:
        parts = n.split('/')
        if parts[0] == 'openmdao' and parts[1] == 'test':
            if len(parts) > 2 and parts[2] != 'plugins':
                names.append(n)
        elif 'test' not in parts:
            names.append(n)

    names.sort()

    #wanted to sort traits separately based only on filenames despite differing paths
    traitz = list(_get_resource_files(dist,
                                      ['*__init__.py', '*setup.py', '*/test/*.py'],
                                      ['*/main/datatypes*.py', '*/lib/datatypes*.py']))
    sorted_traitz = sorted(traitz, cmp=_compare_traits_path)

    names.extend(sorted_traitz)

    exdirs = ['build', 'examples']

    oldheader = None
    newheader = None

    for name in names:
        if os.path.basename(name) == 'releaseinfo.py':
            continue

        for ex in exdirs:
            if name.startswith('%s/' % ex) or '/%s/' % ex in name:
                break
            else:
                x = name.split('/')
                #kind of dirty, but the other sections don't need api header.
                if os.path.basename(name) == 'api.py' and x[1] == 'lib':
                    newheader = 'api'
                if len(x) >= 4:
                    newheader =  x[2]
            if (oldheader != newheader):
                print >> outfile, '**%s**' % newheader.upper()
                print >> outfile, '_' * (4 + len(newheader)) + '\n'
                oldheader = newheader

        _mod_sphinx_info(name, outfile, show_undoc=show_undoc)
Example #30
    def add_files(self):
        r"""
        Checks for the flags '--all', '--new' and '--sagenb'.

        For each one present, this function adds the appropriate directories and files to the todo list.

        EXAMPLES::

            sage: from sage.doctest.control import DocTestDefaults, DocTestController
            sage: from sage.env import SAGE_SRC
            sage: import os
            sage: log_location = os.path.join(SAGE_TMP, 'control_dt_log.log')
            sage: DD = DocTestDefaults(all=True, logfile=log_location)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting entire Sage library.
            sage: os.path.join(SAGE_SRC, 'sage') in DC.files
            True

        ::

            sage: DD = DocTestDefaults(new = True)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting files changed since last HG commit.
            sage: len(DC.files) == len([L for L in hg_sage('status', interactive=False, debug=False)[0].split('\n') if len(L.split()) ==2 and L.split()[0] in ['M','A']])
            True

        ::

            sage: DD = DocTestDefaults(sagenb = True)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting the Sage notebook.
            sage: DC.files[0][-6:]
            'sagenb'
        """
        opj = os.path.join
        from sage.env import SAGE_SRC as base
        if self.options.all:
            self.log("Doctesting entire Sage library.")
            from glob import glob
            self.files.append(opj(base, 'sage'))
            self.files.append(opj(base, 'doc', 'common'))
            self.files.extend(glob(opj(base, 'doc', '[a-z][a-z]')))
            self.options.sagenb = True
        elif self.options.new:
            self.log("Doctesting files changed since last HG commit.")
            import sage.all_cmdline
            from sage.misc.hg import hg_sage
            for X in hg_sage('status', interactive=False, debug=False)[0].split('\n'):
                tup = X.split()
                if len(tup) != 2: continue
                c, filename = tup
                if c in ['M','A']:
                    filename = opj(base, filename)
                    self.files.append(filename)
        if self.options.sagenb:
            if not self.options.all:
                self.log("Doctesting the Sage notebook.")
            from pkg_resources import Requirement, working_set
            sagenb_loc = working_set.find(Requirement.parse('sagenb')).location
            self.files.append(opj(sagenb_loc, 'sagenb'))
Example #31
    def add_files(self):
        r"""
        Checks for the flags '--all', '--new' and '--sagenb'.

        For each one present, this function adds the appropriate directories and files to the todo list.

        EXAMPLES::

            sage: from sage.doctest.control import DocTestDefaults, DocTestController
            sage: from sage.env import SAGE_SRC
            sage: import os
            sage: log_location = os.path.join(SAGE_TMP, 'control_dt_log.log')
            sage: DD = DocTestDefaults(all=True, logfile=log_location)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting entire Sage library.
            sage: os.path.join(SAGE_SRC, 'sage') in DC.files
            True

        ::

            sage: DD = DocTestDefaults(new = True)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting ...

        ::

            sage: DD = DocTestDefaults(sagenb = True)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting the Sage notebook.
            sage: DC.files[0][-6:]
            'sagenb'
        """
        opj = os.path.join
        from sage.env import SAGE_SRC, SAGE_ROOT
        def all_files():
            from glob import glob
            self.files.append(opj(SAGE_SRC, 'sage'))
            self.files.append(opj(SAGE_SRC, 'sage_setup'))
            self.files.append(opj(SAGE_SRC, 'doc', 'common'))
            self.files.extend(glob(opj(SAGE_SRC, 'doc', '[a-z][a-z]')))
            self.options.sagenb = True
        DOT_GIT = opj(SAGE_ROOT, '.git')
        have_git = os.path.exists(DOT_GIT)
        if self.options.all or (self.options.new and not have_git):
            self.log("Doctesting entire Sage library.")
            all_files()
        elif self.options.new and have_git:
            # Get all files changed in the working repo.
            self.log("Doctesting files changed since last git commit")
            import subprocess
            change = subprocess.check_output(["git",
                                              "--git-dir=" + DOT_GIT,
                                              "--work-tree=" + SAGE_ROOT,
                                              "status",
                                              "--porcelain"])
            for line in change.split("\n"):
                if not line:
                    continue
                data = line.strip().split(' ')
                status, filename = data[0], data[-1]
                if (set(status).issubset("MARCU")
                    and filename.startswith("src/sage")
                    and (filename.endswith(".py") or filename.endswith(".pyx"))):
                    self.files.append(os.path.relpath(opj(SAGE_ROOT,filename)))
        if self.options.sagenb:
            if not self.options.all:
                self.log("Doctesting the Sage notebook.")
            from pkg_resources import Requirement, working_set
            sagenb_loc = working_set.find(Requirement.parse('sagenb')).location
            self.files.append(opj(sagenb_loc, 'sagenb'))
Example #32
        t.append(x)
    return '\n'.join(t)


try:
    from sage.misc.preparser import strip_string_literals
except ImportError:

    def strip_string_literals(code, state=None):
        # todo -- do we need this?
        return code


try:
    from pkg_resources import Requirement, working_set
    SAGENB_VERSION = working_set.find(Requirement.parse('sagenb')).version
except AttributeError:
    SAGENB_VERSION = ""

try:
    import sage.version
    SAGE_VERSION = sage.version.version
except ImportError:
    SAGE_VERSION = ""

try:
    from sage.plot.colors import Color
except ImportError:

    class Color:
        def __init__(self, *args, **kwds):
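
The except AttributeError above covers the case where working_set.find returns None for an uninstalled package, so that .version fails. The same guard written explicitly, as a sketch:

from pkg_resources import Requirement, working_set

dist = working_set.find(Requirement.parse('sagenb'))
SAGENB_VERSION = dist.version if dist is not None else ""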
Example #33
def _make_license_table(docdir, reqs=None):
    """
    Generates a file in docs/licenses/licenses_table.rst that
    contains a restructured text table with the name, license, and home-page of
    all distributions that openmdao depends on.
    """
    meta_names = ['name', 'version', 'license', 'home-page']
    headers = [
        '**Distribs Used by OpenMDAO**', '**Version**', '**License**',
        '**Link**'
    ]
    numcols = len(meta_names)
    data_templates = ["%s", "%s", "%s", "%s"]
    col_spacer = ' '
    max_col_width = 80

    license_fname = os.path.join(docdir, 'licenses', 'licenses_table.txt')

    if reqs is None:
        reqs = [Requirement.parse(p) for p in get_openmdao_packages()]

    reqset = set(reqs)
    dists = set()
    done = set()
    while reqset:
        req = reqset.pop()
        if req.project_name not in done:
            done.add(req.project_name)
            dist = working_set.find(req)
            if dist is not None:
                dists.add(dist)
                reqset.update(dist.requires())

    metadict = {}
    for dist in dists:
        metadict[dist.project_name] = get_dist_metadata(dist)
    for projname, meta in metadict.items():
        for i, name in enumerate(meta_names):
            try:
                meta[name] = data_templates[i] % str(meta[name])
            except KeyError:
                meta[name] = 'UNKNOWN'
        if meta['name'] == 'UNKNOWN':
            meta['name'] = projname
    # figure out sizes of table columns
    colwidths = [len(s) + 1 for s in headers]
    for i, name in enumerate(meta_names):
        sz = max([len(m[name]) for m in metadict.values()]) + 1
        sz = min(sz, max_col_width)
        colwidths[i] = max(colwidths[i], sz)

    with open(license_fname, 'wb') as outfile:
        # write header
        outfile.write(_get_border_line(numcols, colwidths, char='='))
        for i, header in enumerate(headers):
            outfile.write(header + ' ' * (colwidths[i] - len(header)))
            outfile.write(col_spacer)
        outfile.write('\n')
        outfile.write(_get_border_line(numcols, colwidths, char='='))

        # write table data
        tups = [(k, v) for k, v in metadict.items()]
        tups = sorted(tups, lambda x, y: cmp(x[0].lower(), y[0].lower()))
        for j, tup in enumerate(tups):
            for i, name in enumerate(meta_names):
                outfile.write(_get_table_cell(tup[1][name], colwidths[i]))
                outfile.write(col_spacer)
            outfile.write('\n')
            if j < len(tups) - 1:
                outfile.write(_get_border_line(numcols, colwidths, char='-'))

        # bottom border
        outfile.write(_get_border_line(numcols, colwidths, char='='))
        outfile.write('\n')
Example #34
"""Utilities for powerdns models"""

from pkg_resources import working_set, Requirement

import rules
from django.conf import settings
from django.contrib.contenttypes.fields import GenericRelation
from django.core.mail import send_mail
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import ugettext_lazy as _
from threadlocals.threadlocals import get_current_user
from dj.choices import Choices

VERSION = working_set.find(Requirement.parse('django-powerdns-dnssec')).version

# Due to the idiotic way permissions work in admin, we need to give users
# generic 'change' view (so they see the changelist), but no generic 'delete'
# view (so they can't bulk-delete).


@rules.predicate
def no_object(user, object_):
    return object_ is None


@rules.predicate
def is_owner(user, object_):
    return object_ and object_.owner == user
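
A caveat on the module-level VERSION lookup above: if 'django-powerdns-dnssec' is not installed (for instance when running from a source checkout), working_set.find returns None and the attribute access raises AttributeError at import time. A defensive sketch:

from pkg_resources import working_set, Requirement

_dist = working_set.find(Requirement.parse('django-powerdns-dnssec'))
VERSION = _dist.version if _dist is not None else 'unknown'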

Example #35
    def add_files(self):
        r"""
        Checks for the flags '--all', '--new' and '--sagenb'.

        For each one present, this function adds the appropriate directories and files to the todo list.

        EXAMPLES::

            sage: from sage.doctest.control import DocTestDefaults, DocTestController
            sage: from sage.env import SAGE_SRC
            sage: import os
            sage: log_location = os.path.join(SAGE_TMP, 'control_dt_log.log')
            sage: DD = DocTestDefaults(all=True, logfile=log_location)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting entire Sage library.
            sage: os.path.join(SAGE_SRC, 'sage') in DC.files
            True

        ::

            sage: DD = DocTestDefaults(new = True)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting ...

        ::

            sage: DD = DocTestDefaults(sagenb = True)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()  # py2
            Doctesting the Sage notebook.
            sage: DC.files[0][-6:]  # py2
            'sagenb'
        """
        opj = os.path.join
        from sage.env import SAGE_SRC, SAGE_DOC_SRC, SAGE_ROOT, SAGE_ROOT_GIT
        # SAGE_ROOT_GIT can be None on distributions which typically
        # only have the SAGE_LOCAL install tree but not SAGE_ROOT
        if SAGE_ROOT_GIT is not None:
            have_git = os.path.isdir(SAGE_ROOT_GIT)
        else:
            have_git = False

        def all_files():
            self.files.append(opj(SAGE_SRC, 'sage'))
            # Don't run these tests when not in the git repository; they are
            # of interest for building sage, but not for runtime behavior and
            # don't make sense to run outside a build environment
            if have_git:
                self.files.append(opj(SAGE_SRC, 'sage_setup'))
            self.files.append(SAGE_DOC_SRC)
            self.options.sagenb = True

        if self.options.all or (self.options.new and not have_git):
            self.log("Doctesting entire Sage library.")
            all_files()
        elif self.options.new and have_git:
            # Get all files changed in the working repo.
            self.log("Doctesting files changed since last git commit")
            import subprocess
            change = subprocess.check_output(["git",
                                              "--git-dir=" + SAGE_ROOT_GIT,
                                              "--work-tree=" + SAGE_ROOT,
                                              "status",
                                              "--porcelain"])
            change = change.decode('utf-8')
            for line in change.split("\n"):
                if not line:
                    continue
                data = line.strip().split(' ')
                status, filename = data[0], data[-1]
                if (set(status).issubset("MARCU")
                    and filename.startswith("src/sage")
                    and (filename.endswith(".py") or
                         filename.endswith(".pyx") or
                         filename.endswith(".rst"))):
                    self.files.append(os.path.relpath(opj(SAGE_ROOT,filename)))
        if self.options.sagenb:
            if six.PY3:
                if not self.options.all:
                    self.log("Skipping doctesting of the Sage notebook: "
                             "not installed on Python 3")
                return

            if not self.options.all:
                self.log("Doctesting the Sage notebook.")
            from pkg_resources import Requirement, working_set
            sagenb_loc = working_set.find(Requirement.parse('sagenb')).location
            self.files.append(opj(sagenb_loc, 'sagenb'))
Example #36
def find_resource(project, resource_path, alt_path=None, return_path=False):
    """ Returns a file object or file path pointing to the desired resource.

    Parameters
    ----------
    project : str
        The name of the project to look for the resource in. Can be the name or
        a requirement string. Ex: 'MyProject', 'MyProject>1.0', 'MyProject==1.1'
    resource_path : str
        The path to the file from inside the package. If the file desired is
        MyProject/data/image.jpg, resource_path would be 'data/image.jpg'.
    alt_path : str
        The path to the resource relative to the location of the application's
        top-level script (the one with __main__). If this function is called in
        code/scripts/myscript.py and the resource is code/data/image.jpg, the
        alt_path would be '../data/image.jpg'. This path is only used if the
        resource cannot be found using setuptools.
    return_path : bool
        Determines whether the function should return a file object or a full
        path to the resource.

    Returns
    -------
    file : file object or file path
        A file object containing the resource. If return_path is True, 'file'
        will be the full path to the resource. If the file is not found or
        cannot be opened, None is returned.

    Description
    -----------
    This function will find a desired resource file and return an opened file
    object. The main method of finding the resource uses the pkg_resources
    resource_stream method, which searches your working set for the installed
    project specified and appends the resource_path given to the project
    path, leading it to the file. If setuptools is not installed or it cannot
    find/open the resource, find_resource will use the sys.path[0] to find the
    resource if alt_path is defined.
    """

    try:
        # Get the image using the pkg_resources resource_stream module, which
        # will find the file by getting the Chaco install path and appending the
        # image path. This method works in all cases as long as setuptools is
        # installed. If setuptools isn't installed, the backup sys.path[0]
        # method is used.
        from pkg_resources import resource_stream, working_set, Requirement

        # Get a requirement for the project
        requirement = Requirement.parse(project)

        if return_path:
            dist = working_set.find(requirement)
            full_path = os.path.join(dist.location, resource_path)

            # If the path exists, return it
            if os.path.exists(full_path):
                return full_path
            else:
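                # No file at the install location: fall through to
                # the except clause below.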
                raise
        else:
            return resource_stream(requirement, resource_path)

    except:
        # Setuptools was either not installed, or it failed to find the file.
        # First check to see if the package was installed using egginst by
        # looking for the file at: site-packages\\resource_path
        full_path = os.path.join(get_python_lib(), resource_path)
        if os.path.exists(full_path):
            if return_path:
                return full_path
            else:
                return open(full_path, "rb")

        # Get the image using sys.path[0], which is the directory that the
        # running script lives in. The path to the file is then constructed by
        # navigating from the script's location. This method only works if this
        # script is called directly from the command line using
        # 'python %SOMEPATH%/<script>'
        if alt_path is None:
            return
        if return_path:
            return os.path.join(sys.path[0], alt_path)

        # Try to open the file, return None on exception
        try:
            return open(os.path.join(sys.path[0], alt_path), "rb")
        except:
            return
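
A hedged usage sketch of find_resource; 'MyProject' and its data file are hypothetical placeholders:

# Resolve a path without opening the file.
path = find_resource('MyProject', 'data/image.jpg',
                     alt_path='../data/image.jpg', return_path=True)
if path is not None:
    print('resource found at', path)

# Or get an opened, binary-mode file object instead.
stream = find_resource('MyProject', 'data/image.jpg',
                       alt_path='../data/image.jpg')
if stream is not None:
    data = stream.read()
    stream.close()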
Example No. 37
def _pkg_sphinx_info(startdir, pkg, outfile, show_undoc=False,
                    underline='-'):
    """Generate Sphinx autodoc directives for all of the modules in 
    the given package.
    
    """
    # locate the package directory
    topdir = pkg
    pkgdir = pkg
    
    dist = working_set.find(Requirement.parse(pkg))
    if dist is None:
        logging.error('no dist found for Requirement(%s)' % pkg)
    print >> outfile, 'Package %s' % pkg
    print >> outfile, underline*(len('Package ')+len(pkg))
    print >> outfile, '\n\n'
    
    # this behaves strangely, maybe because we use namespace pkgs?
    # mod points to module 'openmdao', not 'openmdao.whatever', so we
    # have to access 'whatever' through the 'openmdao' module
    mod = __import__(pkg)
    docs = getattr(mod, pkg.split('.')[1]).__doc__
    if docs:
        print >> outfile, docs, '\n'
    
    #excluding traits now since they need to be sorted separately
    names = list(_get_resource_files(dist,
                                    ['*__init__.py','*setup.py','*/test/*.py', '*datatypes*.py'],
                                    ['*.py']))
    names.sort()
    
    #wanted to sort traits separately based only on filenames despite differing paths
    traitz = list(_get_resource_files(dist, ['*__init__.py','*setup.py','*/test/*.py'], ['*datatypes*.py']))
    sorted_traitz = sorted(traitz, cmp=_compare_traits_path)
    
    names.extend(sorted_traitz)

    exdirs = ['build', 'examples']
    
    oldheader = None
    newheader = None

    for name in names:
        if os.path.basename(name) == 'releaseinfo.py':
            continue

        for ex in exdirs:
            if name.startswith('%s/' % ex) or '/%s/' % ex in name:
                break
            else:
                x = name.split('/')
                # kind of dirty, but the other sections don't need the api header.
                if os.path.basename(name) == 'api.py' and x[1] == 'lib':
                    newheader = 'api'
                if len(x) >= 4:
                    newheader =  x[2]
            if (oldheader != newheader):
                print >> outfile, '**%s**' % newheader.upper()
                print >> outfile, '_'*(4+len(newheader)) + '\n'
                oldheader = newheader
               
        _mod_sphinx_info(name, outfile, show_undoc=show_undoc)
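
_get_resource_files is project-internal and not shown here; one plausible reading of its exclude/include contract, sketched with fnmatch (an assumption, not the actual implementation):

import fnmatch

def filter_resources(names, exclude, include):
    """Yield names matching an include pattern and no exclude pattern."""
    for name in names:
        if any(fnmatch.fnmatch(name, pat) for pat in exclude):
            continue
        if any(fnmatch.fnmatch(name, pat) for pat in include):
            yield name

# Hypothetical file list mirroring the call above.
files = ['openmdao/lib/api.py', 'openmdao/test/test_x.py', 'setup.py']
print(list(filter_resources(files, ['*setup.py', '*/test/*.py'], ['*.py'])))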
Example No. 39

import webbrowser
import os
import sys

from pkg_resources import working_set
from pkg_resources import Requirement

pkg_name = 'pabwgtsite'
site_path = (working_set.find(Requirement.parse(pkg_name)).location
             + '\\WGT_Website\\')

command = 'start python ' + site_path + 'manage.py runserver'
os.system(command)


url = 'http://localhost:8000/'
arg = ""
if len(sys.argv) >= 2:
    arg = sys.argv[1]
url = url + arg

webbrowser.open(url)
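
The same launcher, sketched with the path handled portably and without shelling out through 'start' (the package name and layout are taken from the snippet above):

import os
import subprocess
import sys
import webbrowser

from pkg_resources import Requirement, working_set

dist = working_set.find(Requirement.parse('pabwgtsite'))
if dist is not None:
    manage_py = os.path.join(dist.location, 'WGT_Website', 'manage.py')
    # Launch the development server without blocking this script.
    subprocess.Popen([sys.executable, manage_py, 'runserver'])

arg = sys.argv[1] if len(sys.argv) >= 2 else ''
webbrowser.open('http://localhost:8000/' + arg)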
Example No. 40
# -*- coding: utf-8 -*-
"""
Various utilities and helpers
"""

import sys
from os.path import dirname
from os.path import isabs
from os.path import normpath
from os.path import relpath

try:
    from pkg_resources import working_set
    from pkg_resources import Requirement
    ply_dist = working_set.find(Requirement.parse('ply'))
except ImportError:  # pragma: no cover
    ply_dist = None

py_major = sys.version_info.major
unicode = unicode if py_major < 3 else None  # noqa: F821
str = str if sys.version_info.major > 2 else unicode  # noqa: F821


def repr_compat(s):
    """
    Since Python 2 is annoying with unicode literals, and since we
    enforce the usage of unicode, this ensures the repr doesn't spew
    out the unicode literal prefix.
    """

    if unicode and isinstance(s, unicode):
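
The snippet is truncated here, but the guard at the top is worth noting: working_set.find returns None when the distribution is absent, so ply_dist must be checked before use. A minimal sketch:

from pkg_resources import Requirement, working_set

dist = working_set.find(Requirement.parse('ply'))
if dist is None:
    print('ply is not installed')
else:
    print('ply %s at %s' % (dist.version, dist.location))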
Example No. 41
def _make_license_table(docdir, reqs=None):
    """
    Generates a file in docs/licenses/licenses_table.txt that
    contains a reStructuredText table with the name, license, and home-page of
    all distributions that openmdao depends on.
    """
    meta_names = ['name', 'version', 'license', 'home-page']
    headers = ['**Distribs Used by OpenMDAO**',
               '**Version**',
               '**License**',
               '**Link**']
    numcols = len(meta_names)
    data_templates = ["%s", "%s", "%s", "%s"]
    col_spacer = ' '
    max_col_width = 80

    license_fname = os.path.join(docdir, 'licenses', 'licenses_table.txt')

    if reqs is None:
        reqs = [Requirement.parse(p) for p in get_openmdao_packages()]

    reqset = set(reqs)
    dists = set()
    done = set()
    while reqset:
        req = reqset.pop()
        if req.project_name not in done:
            done.add(req.project_name)
            dist = working_set.find(req)
            if dist is not None:
                dists.add(dist)
                reqset.update(dist.requires())

    metadict = {}
    for dist in dists:
        metadict[dist.project_name] = get_dist_metadata(dist)
    for projname, meta in metadict.items():
        for i, name in enumerate(meta_names):
            try:
                meta[name] = data_templates[i] % str(meta[name])
            except KeyError:
                meta[name] = 'UNKNOWN'
        if meta['name'] == 'UNKNOWN':
            meta['name'] = projname
    # figure out sizes of table columns
    colwidths = [len(s)+1 for s in headers]
    for i, name in enumerate(meta_names):
        sz = max([len(m[name]) for m in metadict.values()])+1
        sz = min(sz, max_col_width)
        colwidths[i] = max(colwidths[i], sz)

    with open(license_fname, 'wb') as outfile:
        # write header
        outfile.write(_get_border_line(numcols, colwidths, char='='))
        for i, header in enumerate(headers):
            outfile.write(header+' '*(colwidths[i]-len(header)))
            outfile.write(col_spacer)
        outfile.write('\n')
        outfile.write(_get_border_line(numcols, colwidths, char='='))

        # write table data
        tups = [(k, v) for k, v in metadict.items()]
        tups = sorted(tups, lambda x, y: cmp(x[0].lower(), y[0].lower()))
        for j, tup in enumerate(tups):
            for i, name in enumerate(meta_names):
                outfile.write(_get_table_cell(tup[1][name], colwidths[i]))
                outfile.write(col_spacer)
            outfile.write('\n')
            if j < len(tups) - 1:
                outfile.write(_get_border_line(numcols, colwidths, char='-'))

        # bottom border
        outfile.write(_get_border_line(numcols, colwidths, char='='))
        outfile.write('\n')
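
The while-loop above computes the transitive closure of the requirement set; the same walk, isolated and seeded with a single hypothetical requirement:

from pkg_resources import Requirement, working_set

reqset = {Requirement.parse('setuptools')}  # hypothetical seed
dists, done = set(), set()
while reqset:
    req = reqset.pop()
    if req.project_name not in done:
        done.add(req.project_name)
        dist = working_set.find(req)
        if dist is not None:
            dists.add(dist)
            # Enqueue this distribution's own requirements.
            reqset.update(dist.requires())

for dist in sorted(dists, key=lambda d: d.project_name.lower()):
    print(dist.project_name, dist.version)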
Example No. 42
    def add_files(self):
        r"""
        Checks for the flags '--all', '--new' and '--sagenb'.

        For each one present, this function adds the appropriate directories and files to the todo list.

        EXAMPLES::

            sage: from sage.doctest.control import DocTestDefaults, DocTestController
            sage: from sage.env import SAGE_SRC
            sage: import os
            sage: log_location = os.path.join(SAGE_TMP, 'control_dt_log.log')
            sage: DD = DocTestDefaults(all=True, logfile=log_location)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting entire Sage library.
            sage: os.path.join(SAGE_SRC, 'sage') in DC.files
            True

        ::

            sage: DD = DocTestDefaults(new = True)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting ...

        ::

            sage: DD = DocTestDefaults(sagenb = True)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()  # py2
            Doctesting the Sage notebook.
            sage: DC.files[0][-6:]  # py2
            'sagenb'
        """
        opj = os.path.join
        from sage.env import SAGE_SRC, SAGE_DOC_SRC, SAGE_ROOT
        DOT_GIT = opj(SAGE_ROOT, '.git')
        have_git = os.path.exists(DOT_GIT)

        def all_files():
            self.files.append(opj(SAGE_SRC, 'sage'))
            # Don't run these tests when not in the git repository; they are
            # of interest for building sage, but not for runtime behavior and
            # don't make sense to run outside a build environment
            if have_git:
                self.files.append(opj(SAGE_SRC, 'sage_setup'))
            self.files.append(SAGE_DOC_SRC)
            self.options.sagenb = True

        if self.options.all or (self.options.new and not have_git):
            self.log("Doctesting entire Sage library.")
            all_files()
        elif self.options.new and have_git:
            # Get all files changed in the working repo.
            self.log("Doctesting files changed since last git commit")
            import subprocess
            change = subprocess.check_output([
                "git", "--git-dir=" + DOT_GIT, "--work-tree=" + SAGE_ROOT,
                "status", "--porcelain"
            ])
            change = change.decode('utf-8')
            for line in change.split("\n"):
                if not line:
                    continue
                data = line.strip().split(' ')
                status, filename = data[0], data[-1]
                if (set(status).issubset("MARCU")
                        and filename.startswith("src/sage") and
                    (filename.endswith(".py") or filename.endswith(".pyx")
                     or filename.endswith(".rst"))):
                    self.files.append(os.path.relpath(opj(SAGE_ROOT,
                                                          filename)))
        if self.options.sagenb:
            if six.PY3:
                if not self.options.all:
                    self.log("Skipping doctesting of the Sage notebook: "
                             "not installed on Python 3")
                return

            if not self.options.all:
                self.log("Doctesting the Sage notebook.")
            from pkg_resources import Requirement, working_set
            sagenb_loc = working_set.find(Requirement.parse('sagenb')).location
            self.files.append(opj(sagenb_loc, 'sagenb'))
Example No. 43
    def add_files(self):
        """
        Checks for the flags '--all', '--new' and '--sagenb'.

        For each one present, this function adds the appropriate directories and files to the todo list.

        EXAMPLES::

            sage: from sage.doctest.control import DocTestDefaults, DocTestController
            sage: from sage.env import SAGE_SRC
            sage: import os
            sage: log_location = os.path.join(SAGE_TMP, 'control_dt_log.log')
            sage: DD = DocTestDefaults(all=True, logfile=log_location)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting entire Sage library.
            sage: os.path.join(SAGE_SRC, 'sage') in DC.files
            True

        ::

            sage: DD = DocTestDefaults(new = True)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting files changed since last HG commit.
            sage: len(DC.files) == len([L for L in hg_sage('status', interactive=False, debug=False)[0].split('\n') if len(L.split()) ==2 and L.split()[0] in ['M','A']])
            True

        ::

            sage: DD = DocTestDefaults(sagenb = True)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting the Sage notebook.
            sage: DC.files[0][-6:]
            'sagenb'
        """
        opj = os.path.join
        from sage.env import SAGE_SRC as base
        if self.options.all:
            self.log("Doctesting entire Sage library.")
            from glob import glob
            self.files.append(opj(base, 'sage'))
            self.files.append(opj(base, 'doc', 'common'))
            self.files.extend(glob(opj(base, 'doc', '[a-z][a-z]')))
            self.options.sagenb = True
        elif self.options.new:
            self.log("Doctesting files changed since last HG commit.")
            import sage.all_cmdline
            from sage.misc.hg import hg_sage
            for X in hg_sage('status', interactive=False,
                             debug=False)[0].split('\n'):
                tup = X.split()
                if len(tup) != 2: continue
                c, filename = tup
                if c in ['M', 'A']:
                    filename = opj(base, filename)
                    self.files.append(filename)
        if self.options.sagenb:
            if not self.options.all:
                self.log("Doctesting the Sage notebook.")
            from pkg_resources import Requirement, working_set
            sagenb_loc = working_set.find(Requirement.parse('sagenb')).location
            self.files.append(opj(sagenb_loc, 'sagenb'))
Example No. 44
    def add_files(self):
        r"""
        Checks for the flags '--all', '--new' and '--sagenb'.

        For each one present, this function adds the appropriate directories and files to the todo list.

        EXAMPLES::

            sage: from sage.doctest.control import DocTestDefaults, DocTestController
            sage: from sage.env import SAGE_SRC
            sage: import os
            sage: log_location = os.path.join(SAGE_TMP, 'control_dt_log.log')
            sage: DD = DocTestDefaults(all=True, logfile=log_location)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting entire Sage library.
            sage: os.path.join(SAGE_SRC, 'sage') in DC.files
            True

        ::

            sage: DD = DocTestDefaults(new = True)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting files ...

        ::

            sage: DD = DocTestDefaults(sagenb = True)
            sage: DC = DocTestController(DD, [])
            sage: DC.add_files()
            Doctesting the Sage notebook.
            sage: DC.files[0][-6:]
            'sagenb'
        """
        opj = os.path.join
        from sage.env import SAGE_SRC
        if self.options.all:
            self.log("Doctesting entire Sage library.")
            from glob import glob
            self.files.append(opj(SAGE_SRC, 'sage'))
            self.files.append(opj(SAGE_SRC, 'doc', 'common'))
            self.files.extend(glob(opj(SAGE_SRC, 'doc', '[a-z][a-z]')))
            self.options.sagenb = True
        elif self.options.new:
            # Get all files changed in the working repo, as well as all
            # files in the top Mercurial queue patch.
            from sage.misc.hg import hg_sage
            out, err = hg_sage('status --rev qtip^', interactive=False, debug=False)
            if not err:
                qtop = hg_sage('qtop', interactive=False, debug=False)[0].strip()
                self.log("Doctesting files in mq patch " + repr(qtop))
            else:  # Probably mq isn't used
                out, err = hg_sage('status', interactive=False, debug=False)
                if not err:
                    self.log("Doctesting files changed since last hg commit")
                else:
                    raise RuntimeError("failed to run hg status:\n" + err)

            for X in out.split('\n'):
                tup = X.split()
                if len(tup) != 2: continue
                c, filename = tup
                if c in ['M','A']:
                    filename = opj(SAGE_SRC, filename)
                    if not skipfile(filename):
                        self.files.append(filename)
        if self.options.sagenb:
            if not self.options.all:
                self.log("Doctesting the Sage notebook.")
            from pkg_resources import Requirement, working_set
            sagenb_loc = working_set.find(Requirement.parse('sagenb')).location
            self.files.append(opj(sagenb_loc, 'sagenb'))
Example No. 45
    def _check_requirement(self, requirement):
        dist = None
        try:
            dist = working_set.find(Requirement.parse(requirement))
        except (ValueError, VersionConflict), err:
            pass
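
The except clause above is Python 2 syntax; a Python 3 equivalent keeping the same silent-failure behaviour (a sketch) would be:

from pkg_resources import Requirement, VersionConflict, working_set

def _check_requirement(requirement):
    # find() returns None for absent dists and raises VersionConflict
    # when an incompatible version is installed.
    try:
        return working_set.find(Requirement.parse(requirement))
    except (ValueError, VersionConflict):
        return None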
Example No. 46
def _pkg_sphinx_info(startdir, pkg, outfile, show_undoc=False, underline='-'):
    """Generate Sphinx autodoc directives for all of the modules in
    the given package.

    """
    # locate the package directory
    topdir = pkg
    pkgdir = pkg

    dist = working_set.find(Requirement.parse(pkg))
    if dist is None:
        logging.error('no dist found for Requirement(%s)' % pkg)
    print >> outfile, 'Package %s' % pkg
    print >> outfile, underline * (len('Package ') + len(pkg))
    print >> outfile, '\n\n'

    __import__(pkg)
    mod = sys.modules[pkg]
    docs = mod.__doc__

    if docs:
        print >> outfile, docs, '\n'

    #excluding traits now since they need to be sorted separately
    #also excluding gui-related files, in case of non-gui build
    _names = list(
        _get_resource_files(dist, [
            '*__init__.py', '*setup.py', '*datatypes*.py', '*/main/zmq*.py',
            '*/main/tornado*.py', '*/gui/*/views.py', '*/gui/*/models.py',
            '*/gui/*/urls.py', '*/gui/*/admin.py'
        ], ['*.py']))
    names = []
    for n in _names:
        parts = n.split('/')
        if parts[0] == 'openmdao' and parts[1] == 'test':
            if len(parts) > 2 and parts[2] != 'plugins':
                names.append(n)
        elif 'test' not in parts:
            names.append(n)

    names.sort()

    #wanted to sort traits separately based only on filenames despite differing paths
    traitz = list(
        _get_resource_files(dist, ['*__init__.py', '*setup.py', '*/test/*.py'],
                            ['*/main/datatypes*.py', '*/lib/datatypes*.py']))
    sorted_traitz = sorted(traitz, cmp=_compare_traits_path)

    names.extend(sorted_traitz)

    exdirs = ['build', 'examples']

    oldheader = None
    newheader = None

    for name in names:
        if os.path.basename(name) == 'releaseinfo.py':
            continue

        for ex in exdirs:
            if name.startswith('%s/' % ex) or '/%s/' % ex in name:
                break
            else:
                x = name.split('/')
                #kind of dirty, but the other sections don't need api header.
                if os.path.basename(name) == 'api.py' and x[1] == 'lib':
                    newheader = 'api'
                if len(x) >= 4:
                    newheader = x[2]
            if (oldheader != newheader):
                print >> outfile, '**%s**' % newheader.upper()
                print >> outfile, '_' * (4 + len(newheader)) + '\n'
                oldheader = newheader

        _mod_sphinx_info(name, outfile, show_undoc=show_undoc)
Example No. 47
import logging
import os
import sys

from datetime import datetime
from optparse import OptionParser
from pkg_resources import WorkingSet, Environment, Requirement
from pkg_resources import working_set
try:
    from urllib.request import urlopen  # py3
except ImportError:
    from urllib2 import urlopen  # py2
try:
    from configparser import ConfigParser  # py3
except ImportError:
    from ConfigParser import ConfigParser  # py2

logger = logging.getLogger(os.path.basename(sys.argv[0].rsplit('.', 1)[0]))

DISTRIBUTE = working_set.find(Requirement.parse('distribute')) is not None
del working_set  # I don't like ambiguity

# Note: I tried bootstrapping with an already present zc.buildout, and
# it is not capable of using the 'buildout:executable' option to change
# the running Python.

bootstrap_script_tmpl = """import sys
sys.path[0:0] = [
  %(setuptools_path)r,
  %(buildout_path)r
  ]

import zc.buildout.buildout
sys.argv.append('bootstrap')
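
The DISTRIBUTE probe above imports working_set and deletes it again straight away; one way to keep the probe self-contained (the helper name is hypothetical):

def _is_distribute():
    # True when the 'distribute' fork of setuptools is installed.
    from pkg_resources import Requirement, working_set
    return working_set.find(Requirement.parse('distribute')) is not None

DISTRIBUTE = _is_distribute()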
Example No. 48
#     contributed to the notebook, to whatever suits you."
#
###########################################################################

import os
import keyboards
from template import template
from sagenb.misc.misc import SAGE_URL
from compress.JavaScriptCompressor import JavaScriptCompressor

# Debug mode?  If sagenb lives under SAGE_ROOT/, we minify and cache
# the Notebook JS library.
try:
    from sage.misc.misc import SAGE_ROOT
    from pkg_resources import Requirement, working_set
    sagenb_path = working_set.find(Requirement.parse('sagenb')).location
    debug_mode = SAGE_ROOT not in os.path.realpath(sagenb_path)
except (AttributeError, ImportError):
    debug_mode = False

_cache_javascript = None
def javascript():
    """
    Return javascript library for the FEMhub Online Lab.  This is done by
    reading the template ``notebook_lib.js`` where all of the
    javascript code is contained and replacing a few of the values
    specific to the running session.

    Before the code is returned (as a string), it is run through a
    JavascriptCompressor to minimize the amount of data needed to be
    sent to the browser.
Example No. 49
"""Utilities for powerdns models"""

from pkg_resources import working_set, Requirement

import rules
from django.conf import settings
from django.contrib.contenttypes.fields import GenericRelation
from django.core.mail import send_mail
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import ugettext_lazy as _
from threadlocals.threadlocals import get_current_user
from dj.choices import Choices


VERSION = working_set.find(Requirement.parse("django-powerdns-dnssec")).version


# Due to the idiotic way permissions work in admin, we need to give users
# the generic 'change' view (so they see the changelist), but no generic
# 'delete' view (so they can't bulk-delete).


@rules.predicate
def no_object(user, object_):
    return object_ is None


@rules.predicate
def is_owner(user, object_):
    return object_ and object_.owner == user
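
Since working_set.find returns None for packages that are not installed, the module-level VERSION lookup above raises AttributeError in that case; a defensive variant (a sketch, with a hypothetical default):

from pkg_resources import Requirement, working_set

def _dist_version(name, default='unknown'):
    # Guard against an absent dist instead of letting .version
    # raise AttributeError on None.
    dist = working_set.find(Requirement.parse(name))
    return dist.version if dist is not None else default

VERSION = _dist_version('django-powerdns-dnssec')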
Example No. 50
    return "\n".join(t)


try:
    from sage.misc.preparser import strip_string_literals
except ImportError:

    def strip_string_literals(code, state=None):
        # todo -- do we need this?
        return code


try:
    from pkg_resources import Requirement, working_set

    SAGENB_VERSION = working_set.find(Requirement.parse("sagenb")).version
except (AttributeError, ImportError):
    SAGENB_VERSION = ""

try:
    import sage.version

    SAGE_VERSION = sage.version.version
except ImportError:
    SAGE_VERSION = ""

try:
    from sage.plot.colors import Color
except ImportError:

    class Color:
Example No. 51
def _pkg_sphinx_info(startdir, pkg, outfile, show_undoc=False, underline="-"):
    """Generate Sphinx autodoc directives for all of the modules in 
    the given package.
    
    """
    # locate the package directory
    topdir = pkg
    pkgdir = pkg

    dist = working_set.find(Requirement.parse(pkg))
    if dist is None:
        logging.error("no dist found for Requirement(%s)" % pkg)
    print >> outfile, "Package %s" % pkg
    print >> outfile, underline * (len("Package ") + len(pkg))
    print >> outfile, "\n\n"

    __import__(pkg)
    mod = sys.modules[pkg]
    docs = mod.__doc__

    if docs:
        print >> outfile, docs, "\n"

    # excluding traits now since they need to be sorted separately
    # also excluding gui-related files, in case of non-gui build
    _names = list(
        _get_resource_files(
            dist,
            [
                "*__init__.py",
                "*setup.py",
                "*datatypes*.py",
                "*/main/zmq*.py",
                "*/main/tornado*.py",
                "*/gui/*/views.py",
                "*/gui/*/models.py",
                "*/gui/*/urls.py",
                "*/gui/*/admin.py",
            ],
            ["*.py"],
        )
    )
    names = []
    for n in _names:
        parts = n.split("/")
        if parts[0] == "openmdao" and parts[1] == "test":
            if len(parts) > 2 and parts[2] != "plugins":
                names.append(n)
        elif "test" not in parts:
            names.append(n)

    names.sort()

    # wanted to sort traits separately based only on filenames despite differing paths
    traitz = list(_get_resource_files(dist, ["*__init__.py", "*setup.py", "*/test/*.py"], ["*/lib/datatypes*.py"]))
    sorted_traitz = sorted(traitz, cmp=_compare_traits_path)

    names.extend(sorted_traitz)

    exdirs = ["build", "examples"]

    oldheader = None
    newheader = None

    for name in names:
        if os.path.basename(name) == "releaseinfo.py":
            continue

        for ex in exdirs:
            if name.startswith("%s/" % ex) or "/%s/" % ex in name:
                break
            else:
                x = name.split("/")
                # kind of dirty, but the other sections don't need api header.
                if os.path.basename(name) == "api.py" and x[1] == "lib":
                    newheader = "api"
                if len(x) >= 4:
                    newheader = x[2]
            if oldheader != newheader:
                print >> outfile, "**%s**" % newheader.upper()
                print >> outfile, "_" * (4 + len(newheader)) + "\n"
                oldheader = newheader

        _mod_sphinx_info(name, outfile, show_undoc=show_undoc)
Example No. 52
# variable SAGE_DOC_MATHJAX is set to "no" or "False".  (Note that if
# the user does not set this variable, then the script sage-env sets
# it to "True".)

if (os.environ.get('SAGE_DOC_MATHJAX', 'no') != 'no'
        and os.environ.get('SAGE_DOC_MATHJAX', 'no') != 'False'):

    extensions.append('sphinx.ext.mathjax')
    mathjax_path = 'MathJax.js?config=TeX-AMS_HTML-full,../mathjax_sage.js'

    from sage.misc.latex_macros import sage_mathjax_macros
    # this is broken for now
    # html_theme_options['mathjax_macros'] = sage_mathjax_macros()

    from pkg_resources import Requirement, working_set
    sagenb_path = working_set.find(Requirement.parse('sagenb')).location
    mathjax_relative = os.path.join('sagenb', 'data', 'mathjax')

    # It would be really nice if Sphinx copied the entire mathjax directory
    # (so we could have a _static/mathjax directory) rather than just the
    # contents of the directory.

    mathjax_static = os.path.join(sagenb_path, mathjax_relative)
    html_static_path.append(mathjax_static)
    exclude_patterns = [
        '**/' + os.path.join(mathjax_relative, i)
        for i in ('docs', 'README*', 'test', 'unpacked', 'LICENSE')
    ]
else:
    extensions.append('sphinx.ext.pngmath')

# This is to make the verbatim font smaller;
Example No. 53
    def pkg_version(self, name, default_name='?'):
        dist = working_set.find(Requirement.parse(name))
        name = getattr(dist, 'project_name', name)
        version = getattr(dist, 'version', '?')
        location = getattr(dist, 'location', '?')
        return name, version, location
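
A standalone usage sketch of the method above (note that the default_name parameter is never consulted in the original body):

from pkg_resources import Requirement, working_set

def pkg_version(name):
    # Fall back to '?' for metadata that cannot be resolved.
    dist = working_set.find(Requirement.parse(name))
    return (getattr(dist, 'project_name', name),
            getattr(dist, 'version', '?'),
            getattr(dist, 'location', '?'))

print(pkg_version('setuptools'))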