def test_setup_requires_overrides_version_conflict(self):
        """
        Regression test for issue #323.

        Ensures that a distribution's setup_requires requirements can still be
        installed and used locally even if a conflicting version of that
        requirement is already on the path.
        """

        pr_state = pkg_resources.__getstate__()
        fake_dist = PRDistribution('does-not-matter', project_name='foobar',
                                   version='0.0')
        working_set.add(fake_dist)

        try:
            with contexts.tempdir() as temp_dir:
                test_pkg = create_setup_requires_package(temp_dir)
                test_setup_py = os.path.join(test_pkg, 'setup.py')
                with contexts.quiet() as (stdout, stderr):
                    # Don't even need to install the package, just
                    # running the setup.py at all is sufficient
                    run_setup(test_setup_py, ['--name'])

                lines = stdout.readlines()
                assert len(lines) > 0
                assert lines[-1].strip() == 'test_pkg'
        finally:
            pkg_resources.__setstate__(pr_state)
    def test_setup_requires_overrides_version_conflict(self, use_setup_cfg):
        """
        Regression test for distribution issue 323:
        https://bitbucket.org/tarek/distribute/issues/323

        Ensures that a distribution's setup_requires requirements can still be
        installed and used locally even if a conflicting version of that
        requirement is already on the path.
        """

        fake_dist = PRDistribution('does-not-matter', project_name='foobar',
                                   version='0.0')
        working_set.add(fake_dist)

        with contexts.save_pkg_resources_state():
            with contexts.tempdir() as temp_dir:
                test_pkg = create_setup_requires_package(
                    temp_dir, use_setup_cfg=use_setup_cfg)
                test_setup_py = os.path.join(test_pkg, 'setup.py')
                with contexts.quiet() as (stdout, stderr):
                    # Don't even need to install the package, just
                    # running the setup.py at all is sufficient
                    run_setup(test_setup_py, [str('--name')])

                lines = stdout.readlines()
                assert len(lines) > 0
                assert lines[-1].strip() == 'test_pkg'
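The newer variant above swaps the manual pkg_resources.__getstate__()/__setstate__() bookkeeping of the first example for a contexts.save_pkg_resources_state() context manager. A minimal sketch of such a helper, assuming it simply wraps the same two pkg_resources calls used in the older test:

import contextlib

import pkg_resources


@contextlib.contextmanager
def save_pkg_resources_state():
    """Save pkg_resources' global state and restore it on exit."""
    state = pkg_resources.__getstate__()
    try:
        yield state
    finally:
        pkg_resources.__setstate__(state)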
 def test_setup_requires_with_attr_version(self, use_setup_cfg):
     def make_dependency_sdist(dist_path, distname, version):
         files = [(
             'setup.py',
             DALS("""
                 import setuptools
                 setuptools.setup(
                     name={name!r},
                     version={version!r},
                     py_modules=[{name!r}],
                 )
                 """.format(name=distname, version=version)),
         ), (
             distname + '.py',
             DALS("""
                 version = 42
                 """),
         )]
         make_sdist(dist_path, files)
     with contexts.save_pkg_resources_state():
         with contexts.tempdir() as temp_dir:
             test_pkg = create_setup_requires_package(
                 temp_dir, setup_attrs=dict(version='attr: foobar.version'),
                 make_package=make_dependency_sdist,
                 use_setup_cfg=use_setup_cfg+('version',),
             )
             test_setup_py = os.path.join(test_pkg, 'setup.py')
             with contexts.quiet() as (stdout, stderr):
                 run_setup(test_setup_py, [str('--version')])
             lines = stdout.readlines()
             assert len(lines) > 0
             assert lines[-1].strip() == '42'
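For context, the 'attr: foobar.version' value above exercises setuptools' attr: directive for static metadata: the version is read from the named attribute of the foobar module, which is why the foobar.py stub written by make_dependency_sdist (version = 42) is what '--version' ends up printing. Written the way the surrounding examples build file contents, the relevant setup.cfg fragment would look roughly like the following (an illustration only; the actual file is generated by create_setup_requires_package):

SETUP_CFG_ATTR_VERSION = DALS("""
    [metadata]
    version = attr: foobar.version
    """)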
Example #4
def _directory_import(path):
    """
    Import astropy_helpers from the given path, which will be added to
    sys.path.

    Returns the pkg_resources Distribution for DIST_NAME if it can be found
    (or generated via the path's setup.py), and None otherwise.
    """

    # Look up DIST_NAME on the given path and return its Distribution, if any
    path = os.path.abspath(path)

    # Use an empty WorkingSet rather than the main pkg_resources.working_set,
    # since on older versions of setuptools the latter would raise a
    # VersionConflict when trying to install an upgrade
    ws = pkg_resources.WorkingSet([])
    ws.add_entry(path)
    dist = ws.by_key.get(DIST_NAME)

    if dist is None:
        # We didn't find an egg-info/dist-info in the given path, but if a
        # setup.py exists we can generate it
        setup_py = os.path.join(path, 'setup.py')
        if os.path.isfile(setup_py):
            with _silence():
                run_setup(os.path.join(path, 'setup.py'), ['egg_info'])

            for dist in pkg_resources.find_distributions(path, True):
                # There should be only one...
                return dist

    return dist
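A hypothetical call site for the helper above (illustration only; the vendored directory name is made up), showing that the caller gets back a Distribution or None rather than a boolean:

dist = _directory_import('_astropy_helpers_test_')
if dist is None:
    raise SystemExit(
        '{0} could not be imported from the given path'.format(DIST_NAME))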
def test_bootstrap_from_directory(tmpdir, testpackage, capsys):
    """
    Tests bundling a copy of the astropy_helpers source code in its entirety
    directly in the source package, rather than in an archive.
    """

    import ah_bootstrap

    source = tmpdir.mkdir('source')
    testpackage.copy(source.join('_astropy_helpers_test_'))

    with source.as_cwd():
        source.join('setup.py').write(TEST_SETUP_PY.format(args=''))
        run_setup('setup.py', [])
        stdout, stderr = capsys.readouterr()

        stdout = stdout.splitlines()
        if stdout:
            path = stdout[-1].strip()
        else:
            path = ''

        # Ensure that the astropy_helpers used by the setup.py is the one that
        # was imported from the bundled directory
        assert path == str(source.join('_astropy_helpers_test_',
                                       '_astropy_helpers_test_',
                                       '__init__.py'))
Example #6
def _directory_import(path):
    """
    Import astropy_helpers from the given path, which will be added to
    sys.path.

    Returns the pkg_resources Distribution for DIST_NAME if it can be found
    (or generated via the path's setup.py), and None otherwise.
    """

    # Look up DIST_NAME on the given path and return its Distribution, if any
    path = os.path.abspath(path)
    pkg_resources.working_set.add_entry(path)
    dist = pkg_resources.working_set.by_key.get(DIST_NAME)

    if dist is None:
        # We didn't find an egg-info/dist-info in the given path, but if a
        # setup.py exists we can generate it
        setup_py = os.path.join(path, 'setup.py')
        if os.path.isfile(setup_py):
            with _silence():
                run_setup(os.path.join(path, 'setup.py'), ['egg_info'])

            for dist in pkg_resources.find_distributions(path, True):
                # There should be only one...
                pkg_resources.working_set.add(dist, path, False)
                break

    return dist
    def test_setup_requires_override_nspkg(self, use_setup_cfg):
        """
        Like ``test_setup_requires_overrides_version_conflict`` but where the
        ``setup_requires`` package is part of a namespace package that has
        *already* been imported.
        """

        with contexts.save_pkg_resources_state():
            with contexts.tempdir() as temp_dir:
                foobar_1_archive = os.path.join(temp_dir, 'foo.bar-0.1.tar.gz')
                make_nspkg_sdist(foobar_1_archive, 'foo.bar', '0.1')
                # Now actually go ahead and extract to the temp dir and add the
                # extracted path to sys.path so foo.bar v0.1 is importable
                foobar_1_dir = os.path.join(temp_dir, 'foo.bar-0.1')
                os.mkdir(foobar_1_dir)
                with tarfile.open(foobar_1_archive) as tf:
                    tf.extractall(foobar_1_dir)
                sys.path.insert(1, foobar_1_dir)

                dist = PRDistribution(foobar_1_dir, project_name='foo.bar',
                                      version='0.1')
                working_set.add(dist)

                template = DALS("""\
                    import foo  # Even with foo imported first the
                                # setup_requires package should override
                    import setuptools
                    setuptools.setup(**%r)

                    if not (hasattr(foo, '__path__') and
                            len(foo.__path__) == 2):
                        print('FAIL')

                    if 'foo.bar-0.2' not in foo.__path__[0]:
                        print('FAIL')
                """)

                test_pkg = create_setup_requires_package(
                    temp_dir, 'foo.bar', '0.2', make_nspkg_sdist, template,
                    use_setup_cfg=use_setup_cfg)

                test_setup_py = os.path.join(test_pkg, 'setup.py')

                with contexts.quiet() as (stdout, stderr):
                    try:
                        # Don't even need to install the package, just
                        # running the setup.py at all is sufficient
                        run_setup(test_setup_py, [str('--name')])
                    except pkg_resources.VersionConflict:
                        self.fail(
                            'Installing setup.py requirements '
                            'caused a VersionConflict')

                assert 'FAIL' not in stdout.getvalue()
                lines = stdout.readlines()
                assert len(lines) > 0
                assert lines[-1].strip() == 'test_pkg'
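make_nspkg_sdist is not shown in this listing; a rough sketch of what such a helper could generate for a name like 'foo.bar' is given below, under the assumption that it builds an sdist whose setup.py declares 'foo' as a pkg_resources-style namespace package (which is what lets foo.__path__ end up with one entry per installed distribution, hence the len(foo.__path__) == 2 check in the template). DALS and make_sdist are the same helpers used by the other examples here; this is a sketch, not the actual setuptools test helper.

def make_nspkg_sdist_sketch(dist_path, distname, version):
    parts = distname.split('.')             # e.g. ['foo', 'bar']
    nspackage = parts[0]                     # namespace package: 'foo'
    packages = ['.'.join(parts[:i + 1]) for i in range(len(parts))]
    setup_py = DALS("""
        import setuptools
        setuptools.setup(
            name={name!r},
            version={version!r},
            packages={packages!r},
            namespace_packages=[{nspackage!r}],
        )
        """.format(name=distname, version=version,
                   packages=packages, nspackage=nspackage))
    files = [
        ('setup.py', setup_py),
        # pkg_resources-style namespace __init__ for the shared 'foo' package
        (os.path.join(nspackage, '__init__.py'),
         "__import__('pkg_resources').declare_namespace(__name__)\n"),
        # the concrete subpackage, e.g. foo/bar/__init__.py
        (os.path.join(*parts, '__init__.py'), ''),
    ]
    make_sdist(dist_path, files)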
def test_download_if_needed(tmpdir, testpackage, capsys):
    """
    Tests the case where astropy_helpers was not actually included in a
    package, or is otherwise missing, and we need to "download" it.

    This does not test actually downloading from the internet--this is normally
    done through setuptools' easy_install command which can also install from a
    source archive.  From the point of view of ah_bootstrap the two actions are
    equivalent, so we can just as easily simulate this by providing a setup.cfg
    giving the path to a source archive to "download" (as though it were a
    URL).
    """

    source = tmpdir.mkdir('source')

    # Ensure ah_bootstrap is imported from the local directory
    import ah_bootstrap

    # Make a source distribution of the test package
    with silence():
        run_setup(str(testpackage.join('setup.py')),
                  ['sdist', '--dist-dir=dist', '--formats=gztar'])

    dist_dir = testpackage.join('dist')

    with source.as_cwd():
        source.join('setup.py').write(TEST_SETUP_PY.format(
            args='download_if_needed=True'))
        source.join('setup.cfg').write(textwrap.dedent("""\
            [easy_install]
            find_links = {find_links}
        """.format(find_links=str(dist_dir))))

        run_setup('setup.py', [])

        stdout, stderr = capsys.readouterr()

        # Just take the last line--on Python 2.6 distutils logs warning
        # messages to stdout instead of stderr, causing them to be mixed up
        # with our expected output
        path = stdout.splitlines()[-1].strip()

        # easy_install should have worked by 'installing' astropy_helpers as a
        # .egg in the current directory
        eggs = glob.glob('*.egg')
        assert eggs
        egg = source.join(eggs[0])
        assert os.path.isdir(str(egg))

        a = os.path.normcase(path)
        b = os.path.normcase(str(egg.join('_astropy_helpers_test_',
                                          '__init__.py')))
        assert a == b
Example #9
File: dist.py Project: ibank/asuka
    def archive_package(self, cache=True):
        """Downloads the source tree and makes the source distribution.
        It yields triple of package name, filename of the source
        distribution, and its full path. ::

            with build.archive_package() as (package, filename, path):
                sftp.put(path, filename)

        :param cache: whether to cache the package file or not.
                      ``True`` by default
        :type cache: :class:`bool`

        """
        logger_ = self.get_logger('archive_package')
        with self.branch.fetch(self.commit.ref) as path:
            setup_script = os.path.join(path, 'setup.py')
            if not os.path.isfile(setup_script):
                raise IOError('cannot find setup.py script in the source '
                              'tree {0!r}'.format(self.commit))
            tag = '.{0}.{1:%Y%m%d%H%M%S}.{2!s:.7}'.format(
                self.branch.label,
                self.commit.committed_at.astimezone(UTC()),
                self.commit
            )
            with capture_stdout() as buffer_:
                run_setup(setup_script, ['--fullname'])
                fullname = buffer_.getvalue().rstrip().splitlines()[-1]
            package_name = fullname + tag
            filename = package_name + '.tar.bz2'
            if cache:
                cache_dir_path = os.path.join(
                    tempfile.gettempdir(),
                    'asuka-dist-cache'
                )
                if not os.path.isdir(cache_dir_path):
                    os.makedirs(cache_dir_path)
                cache_path = os.path.join(cache_dir_path, filename)
                if os.path.isfile(cache_path):
                    logger_.info('cache exists: %s, skipping sdist...',
                                 cache_path)
                    yield package_name, filename, cache_path
                    return
            run_setup(setup_script, [
                'egg_info', '--tag-build', tag,
                'sdist', '--formats=bztar'
            ])
            filepath = os.path.join(path, 'dist', filename)
            logger_.info('sdist_path = %r', filepath)
            if cache:
                logger_.info('save sdist cache %s...', cache_path)
                shutil.copyfile(filepath, cache_path)
            yield package_name, filename, filepath
Example #10
    def test_setup_requires(self):
        """Regression test for Distribute issue #318

        Ensure that a package with setup_requires can be installed when
        setuptools is installed in the user site-packages without causing a
        SandboxViolation.
        """

        test_setup_attrs = {
            'name': 'test_pkg', 'version': '0.0',
            'setup_requires': ['foobar'],
            'dependency_links': [os.path.abspath(self.dir)]
        }

        test_pkg = os.path.join(self.dir, 'test_pkg')
        test_setup_py = os.path.join(test_pkg, 'setup.py')
        os.mkdir(test_pkg)

        f = open(test_setup_py, 'w')
        f.write(textwrap.dedent("""\
            import setuptools
            setuptools.setup(**%r)
        """ % test_setup_attrs))
        f.close()

        foobar_path = os.path.join(self.dir, 'foobar-0.1.tar.gz')
        make_trivial_sdist(
            foobar_path,
            textwrap.dedent("""\
                import setuptools
                setuptools.setup(
                    name='foobar',
                    version='0.1'
                )
            """))

        old_stdout = sys.stdout
        old_stderr = sys.stderr
        sys.stdout = StringIO()
        sys.stderr = StringIO()
        try:
            try:
                with reset_setup_stop_context():
                    run_setup(test_setup_py, ['install'])
            except SandboxViolation:
                self.fail('Installation caused SandboxViolation')
        finally:
            sys.stdout = old_stdout
            sys.stderr = old_stderr
def test_bootstrap_from_submodule(tmpdir, testpackage, capsys):
    """
    Tests importing _astropy_helpers_test_ from a submodule in a git
    repository.  This tests actually performing a fresh clone of the repository
    without the submodule initialized, and that importing astropy_helpers in
    that context works transparently after calling
    `ah_bootstrap.use_astropy_helpers`.
    """

    orig_repo = tmpdir.mkdir('orig')

    # Ensure ah_bootstrap is imported from the local directory
    import ah_bootstrap

    with orig_repo.as_cwd():
        run_cmd('git', ['init'])

        # Write a test setup.py that uses ah_bootstrap; it also ensures that
        # any previous reference to astropy_helpers is first wiped from
        # sys.modules
        orig_repo.join('setup.py').write(TEST_SETUP_PY.format(args=''))
        run_cmd('git', ['add', 'setup.py'])

        # Add our own clone of the astropy_helpers repo as a submodule named
        # astropy_helpers
        run_cmd('git', ['submodule', 'add', str(testpackage),
                        '_astropy_helpers_test_'])

        run_cmd('git', ['commit', '-m', 'test repository'])

        os.chdir(str(tmpdir))

        # Creates a clone of our test repo in the directory 'clone'
        run_cmd('git', ['clone', 'orig', 'clone'])

        os.chdir('clone')

        run_setup('setup.py', [])

        stdout, stderr = capsys.readouterr()
        path = stdout.strip()

        # Ensure that the astropy_helpers used by the setup.py is the one that
        # was imported from git submodule
        a = os.path.normcase(path)
        b = os.path.normcase(str(tmpdir.join('clone', '_astropy_helpers_test_',
                                             '_astropy_helpers_test_',
                                             '__init__.py')))
        assert a == b
Example #12
 def run_setup(self, setup_script, setup_base, args):
     args = list(args)
     if self.verbose > 2:
         v = 'v' * (self.verbose - 1)
         args.insert(0, '-' + v)
     elif self.verbose < 2:
         args.insert(0, '-q')
     if self.dry_run:
         args.insert(0, '-n')
     log.info(
         "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args)
     )
     try:
         run_setup(setup_script, args)
     except SystemExit as v:
         raise DistutilsError("Setup script exited with %s" % (v.args[0],))
Example #13
    def run_setup(self, *args):
        old_stdout = sys.stdout
        old_stderr = sys.stderr
        stdout = sys.stdout = StringIO()
        stderr = sys.stderr = StringIO()
        try:
            run_setup('setup.py', args)
            returncode = 0
        except SystemExit as e:
            returncode = e.args[0]
        finally:
            sys.stdout = old_stdout
            sys.stderr = old_stderr

        return (stdout.getvalue().strip(), stderr.getvalue().strip(),
                returncode)
def test_update_git_devstr(package_template, capsys):
    """Tests that the commit number in the package's version string updates
    after git commits even without re-running setup.py.
    """

    run_setup('setup.py', ['--version'])

    stdout, stderr = capsys.readouterr()
    version = stdout.strip()

    m = _DEV_VERSION_RE.match(version)
    assert m, (
        "Stdout did not match the version string pattern:"
        "\n\n{0}\n\nStderr:\n\n{1}".format(stdout, stderr))
    revcount = int(m.group(1))

    import packagename
    assert packagename.__version__ == version

    # Make a silly git commit
    with open('.test', 'w'):
        pass

    run_cmd('git', ['add', '.test'])
    run_cmd('git', ['commit', '-m', 'test'])

    import packagename.version
    imp.reload(packagename.version)

    # Previously this checked packagename.__version__, but in order for that to
    # be updated we also have to re-import _astropy_init which could be tricky.
    # Checking directly that the packagename.version module was updated is
    # sufficient:
    m = _DEV_VERSION_RE.match(packagename.version.version)
    assert m
    assert int(m.group(1)) == revcount + 1

    # This doesn't test astropy_helpers.get_helpers.update_git_devstr directly
    # since a copy of that function is made in packagename.version (so that it
    # can work without astropy_helpers installed).  In order to get test
    # coverage on the actual astropy_helpers copy of that function just call it
    # directly and compare to the value in packagename
    from astropy_helpers.git_helpers import update_git_devstr

    newversion = update_git_devstr(version, path=str(package_template))
    assert newversion == packagename.version.version
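The test relies on a module-level _DEV_VERSION_RE whose first group is the commit count since the last release. Its exact pattern is not shown in this listing, but a regex of roughly the following shape (an assumption, for illustration) would match dev version strings such as '1.2.dev42+g1234abcd':

import re

# group(1) captures the number of commits since the last release tag in a
# PEP 440 dev version carrying a git-hash local segment (pattern assumed).
_DEV_VERSION_RE = re.compile(r'\d+\.\d+(?:\.\d+)?\.dev(\d+)\+g[0-9a-f]+')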
def test_check_submodule_no_git(tmpdir, testpackage):
    """
    Tests that when importing astropy_helpers from a submodule, it is still
    recognized as a submodule even when using the --no-git option.

    In particular this ensures that the auto-upgrade feature is not activated.
    """

    orig_repo = tmpdir.mkdir('orig')

    # Ensure ah_bootstrap is imported from the local directory
    import ah_bootstrap

    with orig_repo.as_cwd():
        run_cmd('git', ['init'])

        # Write a test setup.py that uses ah_bootstrap; it also ensures that
        # any previous reference to astropy_helpers is first wiped from
        # sys.modules
        args = 'auto_upgrade=True'
        orig_repo.join('setup.py').write(TEST_SETUP_PY.format(args=args))
        run_cmd('git', ['add', 'setup.py'])

        # Add our own clone of the astropy_helpers repo as a submodule named
        # astropy_helpers
        run_cmd('git', ['submodule', 'add', str(testpackage),
                        '_astropy_helpers_test_'])

        run_cmd('git', ['commit', '-m', 'test repository'])

        # Temporarily patch _do_upgrade to fail if called
        class UpgradeError(Exception):
            pass

        def _do_upgrade(*args, **kwargs):
            raise UpgradeError()

        orig_do_upgrade = ah_bootstrap._do_upgrade
        ah_bootstrap._do_upgrade = _do_upgrade
        try:
            run_setup('setup.py', ['--no-git'])
        except UpgradeError:
            pytest.fail('Attempted to run auto-upgrade despite importing '
                        '_astropy_helpers_test_ from a git submodule')
        finally:
            ah_bootstrap._do_upgrade = orig_do_upgrade
    def test_setup_requires(self):
        """Regression test for Distribute issue #318

        Ensure that a package with setup_requires can be installed when
        setuptools is installed in the user site-packages without causing a
        SandboxViolation.
        """

        test_pkg = create_setup_requires_package(self.dir)
        test_setup_py = os.path.join(test_pkg, 'setup.py')

        try:
            with quiet_context():
                with reset_setup_stop_context():
                    run_setup(test_setup_py, ['install'])
        except SandboxViolation:
            self.fail('Installation caused SandboxViolation')
Example #17
def _bundle_local_bentoml_if_installed_from_source(target_path):
    """
    If bentoml is installed in editable mode (pip install -e), this will build a
    source distribution from the local bentoml fork and add it to the saved
    BentoService bundle path under the bundled_pip_dependencies directory.
    """

    # Find bentoml module path
    (module_location,) = importlib.util.find_spec('bentoml').submodule_search_locations

    bentoml_setup_py = os.path.abspath(os.path.join(module_location, '..', 'setup.py'))

    # This is for BentoML developers creating a BentoService that contains custom
    # development branches of the BentoML library; it only applies when the BentoML
    # module is installed in development mode via "pip install --editable ."
    if not _is_pip_installed_bentoml() and os.path.isfile(bentoml_setup_py):
        logger.info(
            "Detected non-PyPI-released BentoML installed, copying local BentoML module"
            "files to target saved bundle path.."
        )

        # Create a tmp directory inside the bentoml module for storing the bundled
        # tar.gz file, since dist-dir can only be inside the module directory
        bundle_dir_name = '__bentoml_tmp_sdist_build'
        source_dir = os.path.abspath(
            os.path.join(module_location, '..', bundle_dir_name)
        )

        if os.path.isdir(source_dir):
            shutil.rmtree(source_dir, ignore_errors=True)
        os.mkdir(source_dir)

        from setuptools import sandbox

        sandbox.run_setup(
            bentoml_setup_py,
            ['-q', 'sdist', '--format', 'gztar', '--dist-dir', bundle_dir_name],
        )

        # copy the generated targz to saved bundle directory and remove it from
        # bentoml module directory
        shutil.copytree(source_dir, target_path)

        # clean up sdist build files
        shutil.rmtree(source_dir)
Example #18
 def test_setup_requires_with_transitive_extra_dependency(
         self, monkeypatch):
     # Use case: installing a package with a build dependency on
     # an already installed `dep[extra]`, which in turn depends
     # on `extra_dep` (which is not already installed).
     with contexts.save_pkg_resources_state():
         with contexts.tempdir() as temp_dir:
             # Create source distribution for `extra_dep`.
             make_trivial_sdist(
                 os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'),
                 'extra_dep', '1.0')
             # Create source tree for `dep`.
             dep_pkg = os.path.join(temp_dir, 'dep')
             os.mkdir(dep_pkg)
             path.build({
                 'setup.py':
                 DALS("""
                       import setuptools
                       setuptools.setup(
                           name='dep', version='2.0',
                           extras_require={'extra': ['extra_dep']},
                       )
                      """),
                 'setup.cfg': '',
             }, prefix=dep_pkg)
             # "Install" dep.
             run_setup(
                 os.path.join(dep_pkg, 'setup.py'), [str('dist_info')])
             working_set.add_entry(dep_pkg)
             # Create source tree for test package.
             test_pkg = os.path.join(temp_dir, 'test_pkg')
             test_setup_py = os.path.join(test_pkg, 'setup.py')
             os.mkdir(test_pkg)
             with open(test_setup_py, 'w') as fp:
                 fp.write(DALS(
                     '''
                     from setuptools import installer, setup
                     setup(setup_requires='dep[extra]')
                     '''))
             # Check...
             monkeypatch.setenv(str('PIP_FIND_LINKS'), str(temp_dir))
             monkeypatch.setenv(str('PIP_NO_INDEX'), str('1'))
             monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
             monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
             run_setup(test_setup_py, [str('--version')])
Example #19
def test_update_git_devstr(package_template, capsys):
    """Tests that the commit number in the package's version string updates
    after git commits even without re-running setup.py.
    """

    run_setup('setup.py', ['--version'])

    stdout, stderr = capsys.readouterr()
    version = stdout.strip()

    m = _DEV_VERSION_RE.match(version)
    assert m, ("Stdout did not match the version string pattern:"
               "\n\n{0}\n\nStderr:\n\n{1}".format(stdout, stderr))
    revcount = int(m.group(1))

    import packagename
    assert packagename.__version__ == version

    # Make a silly git commit
    with open('.test', 'w'):
        pass

    run_cmd('git', ['add', '.test'])
    run_cmd('git', ['commit', '-m', 'test'])

    import packagename.version
    imp.reload(packagename.version)

    # Previously this checked packagename.__version__, but in order for that to
    # be updated we also have to re-import _astropy_init which could be tricky.
    # Checking directly that the packagename.version module was updated is
    # sufficient:
    m = _DEV_VERSION_RE.match(packagename.version.version)
    assert m
    assert int(m.group(1)) == revcount + 1

    # This doesn't test astropy_helpers.get_helpers.update_git_devstr directly
    # since a copy of that function is made in packagename.version (so that it
    # can work without astropy_helpers installed).  In order to get test
    # coverage on the actual astropy_helpers copy of that function just call it
    # directly and compare to the value in packagename
    from astropy_helpers.git_helpers import update_git_devstr

    newversion = update_git_devstr(version, path=str(package_template))
    assert newversion == packagename.version.version
 def test_setup_requires_with_pep508_url(self, mock_index, monkeypatch):
     monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
     monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
     monkeypatch.setenv(str('PIP_INDEX_URL'), mock_index.url)
     with contexts.save_pkg_resources_state():
         with contexts.tempdir() as temp_dir:
             dep_sdist = os.path.join(temp_dir, 'dep.tar.gz')
             make_trivial_sdist(dep_sdist, 'dependency', '42')
             dep_url = path_to_url(dep_sdist, authority='localhost')
             test_pkg = create_setup_requires_package(
                 temp_dir,
                 # Ignored (overridden by setup_attrs)
                 'python-xlib', '0.19',
                 setup_attrs=dict(
                     setup_requires='dependency @ %s' % dep_url))
             test_setup_py = os.path.join(test_pkg, 'setup.py')
             run_setup(test_setup_py, [str('--version')])
     assert len(mock_index.requests) == 0
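path_to_url above is a small test helper that turns the sdist's filesystem path into a PEP 508 direct-reference URL with the given authority. A rough sketch under that assumption (not the actual setuptools helper):

import urllib.parse
import urllib.request


def path_to_url(path, authority=None):
    """Convert a local path to a file:// URL, optionally inserting an authority."""
    url = urllib.parse.urljoin('file:', urllib.request.pathname2url(path))
    if authority is not None:
        url = url.replace('file://', 'file://' + authority, 1)
    return url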
def test_bootstrap_from_archive(tmpdir, testpackage, capsys):
    """
    Tests importing _astropy_helpers_test_ from a .tar.gz source archive
    shipped alongside the package that uses it.
    """

    orig_repo = tmpdir.mkdir('orig')

    # Ensure ah_bootstrap is imported from the local directory
    import ah_bootstrap

    # Make a source distribution of the test package
    with silence():
        run_setup(str(testpackage.join('setup.py')),
                  ['sdist', '--dist-dir=dist', '--formats=gztar'])

    dist_dir = testpackage.join('dist')
    for dist_file in dist_dir.visit('*.tar.gz'):
        dist_file.copy(orig_repo)

    with orig_repo.as_cwd():
        # Write a test setup.py that uses ah_bootstrap; it also ensures that
        # any previous reference to astropy_helpers is first wiped from
        # sys.modules
        args = 'path={0!r}'.format(os.path.basename(str(dist_file)))
        orig_repo.join('setup.py').write(TEST_SETUP_PY.format(args=args))

        run_setup('setup.py', [])

        stdout, stderr = capsys.readouterr()
        path = stdout.splitlines()[-1].strip()

        # Installation from the .tar.gz should have resulted in a .egg
        # directory that the _astropy_helpers_test_ package was imported from
        eggs = glob.glob('*.egg')
        assert eggs
        egg = orig_repo.join(eggs[0])
        assert os.path.isdir(str(egg))

        a = os.path.normcase(path)
        b = os.path.normcase(str(egg.join('_astropy_helpers_test_',
                                          '__init__.py')))

        assert a == b
Example #22
def _submit_train_job(gcs_working_dir, version, params, region, scale_tier):
    """Module that submits a training job."""
    # Run package setup
    sandbox.run_setup('setup.py', ['-q', 'sdist'])
    shutil.rmtree('trainer.egg-info')  # Clean up the egg-info directory; it is no longer needed
    # Copy package to GCS package path
    package_path = '{}/packages/{}'.format(gcs_working_dir, PACKAGE_NAME)
    _copy_to_gcs(os.path.join('dist', PACKAGE_NAME), package_path)

    trainer_flags = [
        '--gcs_working_dir', gcs_working_dir,
        '--version', '{}'.format(version),
        '--num_epochs', str(params['num_epochs']),
        '--epochs_per_checkpoint', str(params['epochs_per_checkpoint']),
        '--model_name', params['model_name']
    ]

    training_inputs = {
        'jobDir': gcs_working_dir,
        'packageUris': package_path,
        'pythonModule': 'cmle.trainer.task',
        'args': trainer_flags,
        'region': region
    }
    if scale_tier:
        training_inputs['scale_tier'] = scale_tier

    jobid = 'job_' + datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y%m%d_%H%M%S')
    job_spec = {'jobId': jobid, 'trainingInput': training_inputs}

    _, project_name = google.auth.default()
    project_id = 'projects/{}'.format(project_name)
    cloudml = discovery.build('ml', 'v1',  cache_discovery=False)
    request = cloudml.projects().jobs().create(body=job_spec,
                                               parent=project_id)
    try:
        request.execute()
    except errors.HttpError as err:
        print('There was an error creating the training job. '
              'Check the details:')
        print(err._get_reason())
        sys.exit(1)
Example #23
 def run_setup(self, *args, **kwargs):
     """In Python 3, on MacOS X, the import cache has to be invalidated
     otherwise new extensions built with ``run_setup`` do not always get
     picked up.
     """
     try:
         return sandbox.run_setup(*args, **kwargs)
     finally:
         import importlib
         importlib.invalidate_caches()
Example #24
    def test_setup_requires(self):
        """Regression test for Distribute issue #318

        Ensure that a package with setup_requires can be installed when
        setuptools is installed in the user site-packages without causing a
        SandboxViolation.
        """

        test_pkg = create_setup_requires_package(os.getcwd())
        test_setup_py = os.path.join(test_pkg, 'setup.py')

        try:
            with contexts.quiet():
                with self.patched_setup_context():
                    run_setup(test_setup_py, ['install'])
        except IndexError:
            # Test fails in some cases due to bugs in Python
            # See https://bitbucket.org/pypa/setuptools/issue/201
            pass
    def package_extension(cls, path_to_src, custom_display_name=None, keep_build_dir=False):
        """ Function that creates The Extension.zip file from the give source path and returns
        the path to the new Extension.zip
        - path_to_src [String]: must include a setup.py, customize.py and config.py file.
        - custom_display_name [String]: will give the Extension that display name. Default: name from setup.py file
        - keep_build_dir [Boolean]: if True, dist/build/ will not be removed. Default: False
        - The code will be packaged into a Built Distribution (.tar.gz) in the /dist directory
        - The Extension.zip will also be produced in the /dist directory"""

        # Ensure the src directory exists and we have WRITE access
        cls.__validate_directory__(os.W_OK, path_to_src)

        # Generate paths to files required to create extension
        path_setup_py_file = os.path.join(path_to_src, BASE_NAME_SETUP_PY)
        path_customize_py_file = os.path.join(path_to_src, os.path.basename(path_to_src), PATH_CUSTOMIZE_PY)
        path_config_py_file = os.path.join(path_to_src, os.path.basename(path_to_src), PATH_CONFIG_PY)
        path_output_dir = os.path.join(path_to_src, BASE_NAME_DIST_DIR)
        path_extension_logo = os.path.join(path_to_src, PATH_ICON_EXTENSION_LOGO)
        path_company_logo = os.path.join(path_to_src, PATH_ICON_COMPANY_LOGO)

        LOG.info("Creating Built Distribution in /dist directory")

        # Create the built distribution
        use_setuptools.run_setup(setup_script=path_setup_py_file, args=["sdist", "--formats=gztar"])

        LOG.info("Built Distribution (.tar.gz) created at: %s", path_output_dir)

        # Create the extension
        path_the_extension_zip = cls.create_extension(
            path_setup_py_file=path_setup_py_file,
            path_customize_py_file=path_customize_py_file,
            path_config_py_file=path_config_py_file,
            output_dir=path_output_dir,
            custom_display_name=custom_display_name,
            keep_build_dir=keep_build_dir,
            path_extension_logo=path_extension_logo,
            path_company_logo=path_company_logo
        )

        LOG.info("Extension created at: %s", path_the_extension_zip)

        return path_the_extension_zip
Example #27
def test_create_extension_display_name(fx_copy_fn_main_mock_integration):

    path_fn_main_mock_integration = fx_copy_fn_main_mock_integration[1]

    path_setup_py_file = os.path.join(path_fn_main_mock_integration,
                                      package_helpers.BASE_NAME_SETUP_PY)
    path_apiky_permissions_file = os.path.join(
        path_fn_main_mock_integration,
        package_helpers.BASE_NAME_APIKEY_PERMS_FILE)
    output_dir = os.path.join(path_fn_main_mock_integration,
                              package_helpers.BASE_NAME_DIST_DIR)

    use_setuptools.run_setup(setup_script=path_setup_py_file,
                             args=["sdist", "--formats=gztar"])

    path_app_zip = package_helpers.create_extension(
        path_setup_py_file, path_apiky_permissions_file, output_dir)
    app_json = json.loads(sdk_helpers.read_zip_file(path_app_zip, "app.json"))

    assert app_json.get("display_name") == "Main Mock Integration"
Example #28
def run_setup(*args, **kwargs):
    """
    In Python 3, on MacOS X, the import cache has to be invalidated otherwise
    new extensions built with ``run_setup`` do not always get picked up.
    """

    try:
        return sandbox.run_setup(*args, **kwargs)
    finally:
        if sys.version_info[:2] >= (3, 3):
            import importlib
            importlib.invalidate_caches()
Example #29
 def test_setup_requires_honors_pip_env(self, mock_index, monkeypatch):
     monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
     monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
     monkeypatch.setenv(str('PIP_INDEX_URL'), mock_index.url)
     with contexts.save_pkg_resources_state():
         with contexts.tempdir() as temp_dir:
             test_pkg = create_setup_requires_package(
                 temp_dir, 'python-xlib', '0.19',
                 setup_attrs=dict(dependency_links=[]))
             test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
             with open(test_setup_cfg, 'w') as fp:
                 fp.write(DALS(
                     '''
                     [easy_install]
                     index_url = https://pypi.org/legacy/
                     '''))
             test_setup_py = os.path.join(test_pkg, 'setup.py')
             with pytest.raises(distutils.errors.DistutilsError):
                 run_setup(test_setup_py, [str('--version')])
     assert len(mock_index.requests) == 1
     assert mock_index.requests[0].path == '/python-xlib/'
Example #30
 def test_setup_requires_with_allow_hosts(self, mock_index):
     ''' The `allow-hosts` option is not supported anymore. '''
     files = {
         'test_pkg': {
             'setup.py': DALS('''
                 from setuptools import setup
                 setup(setup_requires='python-xlib')
                 '''),
             'setup.cfg': DALS('''
                 [easy_install]
                 allow_hosts = *
                 '''),
         }
     }
     with contexts.save_pkg_resources_state():
         with contexts.tempdir() as temp_dir:
             path.build(files, prefix=temp_dir)
             setup_py = str(pathlib.Path(temp_dir, 'test_pkg', 'setup.py'))
             with pytest.raises(distutils.errors.DistutilsError):
                 run_setup(setup_py, [str('--version')])
     assert len(mock_index.requests) == 0
Example #31
def test_bootstrap_from_archive(tmpdir, testpackage, capsys):
    """
    Tests importing _astropy_helpers_test_ from a .tar.gz source archive
    shipped alongside the package that uses it.
    """

    orig_repo = tmpdir.mkdir('orig')

    # Ensure ah_bootstrap is imported from the local directory
    import ah_bootstrap

    # Make a source distribution of the test package
    with silence():
        run_setup(str(testpackage.join('setup.py')),
                  ['sdist', '--dist-dir=dist', '--formats=gztar'])

    dist_dir = testpackage.join('dist')
    for dist_file in dist_dir.visit('*.tar.gz'):
        dist_file.copy(orig_repo)

    with orig_repo.as_cwd():
        # Write a test setup.py that uses ah_bootstrap; it also ensures that
        # any previous reference to astropy_helpers is first wiped from
        # sys.modules
        args = 'path={0!r}'.format(os.path.basename(str(dist_file)))
        orig_repo.join('setup.py').write(TEST_SETUP_PY.format(args=args))

        run_setup('setup.py', [])

        stdout, stderr = capsys.readouterr()
        path = stdout.splitlines()[-1].strip()

        # Installation from the .tar.gz should have resulted in a .egg
        # directory that the _astropy_helpers_test_ package was imported from
        eggs = glob.glob('*.egg')
        assert eggs
        egg = orig_repo.join(eggs[0])
        assert os.path.isdir(str(egg))

        assert path == str(egg.join('_astropy_helpers_test_', '__init__.py'))
Example #32
    def test_setup_requires(self):
        """Regression test for Distribute issue #318

        Ensure that a package with setup_requires can be installed when
        setuptools is installed in the user site-packages without causing a
        SandboxViolation.
        """

        test_setup_attrs = {
            'name': 'test_pkg',
            'version': '0.0',
            'setup_requires': ['foobar'],
            'dependency_links': [os.path.abspath(self.dir)]
        }

        test_pkg = os.path.join(self.dir, 'test_pkg')
        test_setup_py = os.path.join(test_pkg, 'setup.py')
        test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
        os.mkdir(test_pkg)

        f = open(test_setup_py, 'w')
        f.write(
            textwrap.dedent("""\
            import setuptools
            setuptools.setup(**%r)
        """ % test_setup_attrs))
        f.close()

        foobar_path = os.path.join(self.dir, 'foobar-0.1.tar.gz')
        make_trivial_sdist(
            foobar_path,
            textwrap.dedent("""\
                import setuptools
                setuptools.setup(
                    name='foobar',
                    version='0.1'
                )
            """))

        old_stdout = sys.stdout
        old_stderr = sys.stderr
        sys.stdout = StringIO()
        sys.stderr = StringIO()
        try:
            try:
                reset_setup_stop_context(
                    lambda: run_setup(test_setup_py, ['install']))
            except SandboxViolation:
                self.fail('Installation caused SandboxViolation')
        finally:
            sys.stdout = old_stdout
            sys.stderr = old_stderr
Example #33
    def create_package_from_setup_py(self, path_to_setup_py,
                                     custom_package_name):
        """
        Creates a package from the setup.py specified in the given path
        :param path_to_setup_py: path to setup.py, or to the directory containing it
        :param custom_package_name: name of the package to look for among the built distributions
        :return: Package Information: Contains package name, version and path
        """
        if os.path.isfile(path_to_setup_py):
            path_to_setup_py = os.path.abspath(path_to_setup_py)
            setup_py_directory = os.path.dirname(path_to_setup_py)
        elif os.path.isdir(path_to_setup_py):
            setup_py_directory = os.path.abspath(path_to_setup_py)
            path_to_setup_py = setup_py_directory + '/setup.py'
        else:
            raise Exception("Given path/file: " + path_to_setup_py +
                            " doesn't exist")

        if not os.path.exists(path_to_setup_py):
            raise Exception("setup.py doesn't exist in the given path: " +
                            path_to_setup_py)

        sandbox.run_setup(path_to_setup_py, ['sdist'])
        eggs = find_distributions(setup_py_directory)

        version = None
        for egg in eggs:
            if custom_package_name in egg.egg_name():
                version = egg.version

        if not version:
            raise Exception("{package_name} not found".format(
                package_name=custom_package_name))

        package_info = dict()
        package_info['path'] = (setup_py_directory + "/dist/" +
                                custom_package_name + "-" + version + ".tar.gz")
        package_info['version'] = version
        package_info['name'] = custom_package_name
        return package_info
Example #34
 def test_setup_requires_with_find_links_in_setup_cfg(
         self, monkeypatch, use_legacy_installer,
         with_dependency_links_in_setup_py):
     monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
     monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
     with contexts.save_pkg_resources_state():
         with contexts.tempdir() as temp_dir:
             make_trivial_sdist(
                 os.path.join(temp_dir, 'python-xlib-42.tar.gz'),
                 'python-xlib', '42')
             test_pkg = os.path.join(temp_dir, 'test_pkg')
             test_setup_py = os.path.join(test_pkg, 'setup.py')
             test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
             os.mkdir(test_pkg)
             with open(test_setup_py, 'w') as fp:
                 if with_dependency_links_in_setup_py:
                     dependency_links = [os.path.join(temp_dir, 'links')]
                 else:
                     dependency_links = []
                 fp.write(
                     DALS('''
                     from setuptools import installer, setup
                     if {use_legacy_installer}:
                         installer.fetch_build_egg = installer._legacy_fetch_build_egg
                     setup(setup_requires='python-xlib==42',
                     dependency_links={dependency_links!r})
                     ''').format(
                         use_legacy_installer=use_legacy_installer,  # noqa
                         dependency_links=dependency_links))
             with open(test_setup_cfg, 'w') as fp:
                 fp.write(
                     DALS('''
                     [easy_install]
                     index_url = {index_url}
                     find_links = {find_links}
                     ''').format(index_url=os.path.join(temp_dir, 'index'),
                                 find_links=temp_dir))
             run_setup(test_setup_py, [str('--version')])
Example #35
    def run(self):
        """
        Remove previous builds, and build source and wheel distributions.

        Also upload to pypi and push git tags.
        """
        try:
            self.status('Removing previous builds…')
            rmtree(os.path.join(here, 'dist'))
        except OSError:
            pass

        self.status('Building Source and Wheel (universal) distribution…')
        sandbox.run_setup('setup.py', ['sdist', 'bdist_wheel', '--universal'])

        self.status('Uploading the package to PyPI via Twine…')
        twineupload("dist/*")
        self.status('Pushing git tags…')
        repo = Repo(os.getcwd())
        tag_name = 'v{0}'.format(about['__version__'])
        repo.create_tag(tag_name)
        repo.remote('origin').push(tag_name)
        sys.exit()
Example #36
    def _build_windows_distribution(self):
        """Create Windows source distribution

        Contains executable installer and examples"""

        import versioneer

        version = versioneer.get_version()

        self._remove_manifest()
        run_setup("setup.py", ["bdist_wininst"])

        self._remove_manifest()

        shutil.copyfile("MANIFEST-WINDOWS.in", "MANIFEST.in")
        run_setup("setup.py",
                  ["sdist", "--dist-dir=dist-windows", "--formats=zip"])
        shutil.move(
            os.path.join("dist-windows", "FiPy-{}.zip".format(version)),
            os.path.join("dist", "FiPy-{}.win32.zip".format(version)),
        )
        os.rmdir("dist-windows")
        os.remove("MANIFEST.in")
 def test_setup_requires_with_allow_hosts(self, mock_index):
     ''' The `allow-hosts` option is not supported anymore. '''
     with contexts.save_pkg_resources_state():
         with contexts.tempdir() as temp_dir:
             test_pkg = os.path.join(temp_dir, 'test_pkg')
             test_setup_py = os.path.join(test_pkg, 'setup.py')
             test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
             os.mkdir(test_pkg)
             with open(test_setup_py, 'w') as fp:
                 fp.write(DALS(
                     '''
                     from setuptools import setup
                     setup(setup_requires='python-xlib')
                     '''))
             with open(test_setup_cfg, 'w') as fp:
                 fp.write(DALS(
                     '''
                     [easy_install]
                     allow_hosts = *
                     '''))
             with pytest.raises(distutils.errors.DistutilsError):
                 run_setup(test_setup_py, [str('--version')])
     assert len(mock_index.requests) == 0
Example #38
        def setup_and_run(temp_dir):
            test_pkg = create_setup_requires_package(temp_dir)
            test_setup_py = os.path.join(test_pkg, 'setup.py')
            try:
                stdout, stderr = quiet_context(lambda: reset_setup_stop_context(
                    # Don't even need to install the package, just running
                    # the setup.py at all is sufficient
                    lambda: run_setup(test_setup_py, ['--name'])))
            except VersionConflict:
                self.fail('Installing setup.py requirements caused '
                          'VersionConflict')

            lines = stdout.splitlines()
            self.assertGreater(len(lines), 0)
            self.assertEqual(lines[-1].strip(), 'test_pkg')
Example #39
    def test_setup_requires(self):
        """Regression test for issue #318

        Ensures that a package with setup_requires can be installed when
        distribute is installed in the user site-packages without causing a
        SandboxViolation.
        """

        test_pkg = create_setup_requires_package(self.dir)
        test_setup_py = os.path.join(test_pkg, 'setup.py')

        try:
            quiet_context(lambda: reset_setup_stop_context(lambda: run_setup(
                test_setup_py, ['install'])))
        except SandboxViolation:
            self.fail('Installation caused SandboxViolation')
Example #40
        def setup_and_run(temp_dir):
            test_pkg = create_setup_requires_package(temp_dir)
            test_setup_py = os.path.join(test_pkg, 'setup.py')
            try:
                stdout, stderr = quiet_context(
                    lambda: reset_setup_stop_context(
                        # Don't even need to install the package, just running
                        # the setup.py at all is sufficient
                        lambda: run_setup(test_setup_py, ['--name'])
                ))
            except VersionConflict:
                self.fail('Installing setup.py requirements caused '
                          'VersionConflict')

            lines = stdout.splitlines()
            self.assertGreater(len(lines), 0)
            self.assertEqual(lines[-1].strip(), 'test_pkg')
Example #41
from modules.singleton import mrcnn, graph
from modules import utility
from models import result
from models.response_models import DetectionResult, DetectedBox, MatchScore, MatchResult
from setuptools import sandbox
from scipy.spatial.distance import cosine
import numpy as np
import os
import base64
try:
    from . import ColorPy
except ImportError:
    # setup ColorPy
    SETUP_DIR = os.path.dirname(os.path.abspath(__file__))
    sandbox.run_setup(SETUP_DIR + '/setup.py', ['build_ext', '--inplace'])
    from . import ColorPy


def detect(images):
    try:
        # init variables
        flag = False
        TARGET_SIZE = (448, 448)
        CONFIDENCE_THRESHOLD = 0.60

        # init result list
        match_result = list()

        # load model & classes
        model = mrcnn.get_model()
        classes = mrcnn.get_classes()
Example #42
 def run(self):
     from setuptools import sandbox
     sandbox.run_setup('setup.py', ['bdist_wheel', '--python-tag', 'py3'])
Example #43
    def run(self, config, args):
        packages = []
        recipes = []
        bundle_recipes = []
        bundle_dirs = []
        setup_args = ['sdist']

        if not config.uninstalled:
            m.error("Can only be run on cerbero-uninstalled")

        store = PackagesStore(config)
        cookbook = CookBook(config)

        packages = list(args.bundlepackages)

        for p in packages:
            package = store.get_package(p)
            if hasattr(package, 'list_packages'):
                packages += package.list_packages()
        packages = remove_list_duplicates(packages)

        for p in packages:
            package = store.get_package(p)
            if hasattr(package, 'deps'):
                packages += package.deps
        packages = remove_list_duplicates(packages)

        for p in packages:
            package = store.get_package(p)
            recipes += package.recipes_dependencies()
        recipes += args.add_recipe

        for r in recipes:
            bundle_recipes += cookbook.list_recipe_deps(r)
        bundle_recipes = remove_list_duplicates(bundle_recipes)

        for p in packages:
            setup_args.append('--package=' + p)

        for r in bundle_recipes:
            setup_args.append('--recipe=' + r.name)
            if r.stype != SourceType.CUSTOM:
                bundle_dirs.append(r.repo_dir)

        if not args.no_bootstrap:
            build_tools = BuildTools(config)
            bs_recipes = build_tools.BUILD_TOOLS + \
                         build_tools.PLAT_BUILD_TOOLS.get(config.platform, [])
            b_recipes = []
            for r in bs_recipes:
                b_recipes += cookbook.list_recipe_deps(r)
            b_recipes = remove_list_duplicates(b_recipes)

            for r in b_recipes:
                if r.stype != SourceType.CUSTOM:
                    bundle_dirs.append(r.repo_dir)

        setup_args.append('--source-dirs=' + ','.join(bundle_dirs))

        command = str(config._relative_path('setup.py'))
        run_setup(command, setup_args)
 def test_bdist_egg_available_on_distutils_pkg(self, distutils_package):
     run_setup('setup.py', ['bdist_egg'])
Example #45
    'regex',
    'requests',
    'torch',
]

# torch.hub doesn't build Cython components, so if they are not found then try
# to build them here
try:
    import fairseq.data.token_block_utils_fast
except (ImportError, ModuleNotFoundError):
    try:
        import cython
        import os
        from setuptools import sandbox
        sandbox.run_setup(
            os.path.join(os.path.dirname(__file__), 'setup.py'),
            ['build_ext', '--inplace'],
        )
    except (ImportError, ModuleNotFoundError):
        print(
            'Unable to build Cython components. Please make sure Cython is '
            'installed if the torch.hub model you are loading depends on it.')

for _model_type, _cls in MODEL_REGISTRY.items():
    for model_name in _cls.hub_models().keys():
        globals()[model_name] = functools.partial(
            _cls.from_pretrained,
            model_name,
        )
    # to simplify the interface we only expose named models
    # globals()[_model_type] = _cls.from_pretrained
Example #46
    def test_setup_requires_override_nspkg(self, use_setup_cfg):
        """
        Like ``test_setup_requires_overrides_version_conflict`` but where the
        ``setup_requires`` package is part of a namespace package that has
        *already* been imported.
        """

        with contexts.save_pkg_resources_state():
            with contexts.tempdir() as temp_dir:
                foobar_1_archive = os.path.join(temp_dir, 'foo.bar-0.1.tar.gz')
                make_nspkg_sdist(foobar_1_archive, 'foo.bar', '0.1')
                # Now actually go ahead and extract to the temp dir and add the
                # extracted path to sys.path so foo.bar v0.1 is importable
                foobar_1_dir = os.path.join(temp_dir, 'foo.bar-0.1')
                os.mkdir(foobar_1_dir)
                with tarfile.open(foobar_1_archive) as tf:
                    tf.extractall(foobar_1_dir)
                sys.path.insert(1, foobar_1_dir)

                dist = PRDistribution(foobar_1_dir,
                                      project_name='foo.bar',
                                      version='0.1')
                working_set.add(dist)

                template = DALS("""\
                    import foo  # Even with foo imported first the
                                # setup_requires package should override
                    import setuptools
                    setuptools.setup(**%r)

                    if not (hasattr(foo, '__path__') and
                            len(foo.__path__) == 2):
                        print('FAIL')

                    if 'foo.bar-0.2' not in foo.__path__[0]:
                        print('FAIL')
                """)

                test_pkg = create_setup_requires_package(
                    temp_dir,
                    'foo.bar',
                    '0.2',
                    make_nspkg_sdist,
                    template,
                    use_setup_cfg=use_setup_cfg)

                test_setup_py = os.path.join(test_pkg, 'setup.py')

                with contexts.quiet() as (stdout, stderr):
                    try:
                        # Don't even need to install the package, just
                        # running the setup.py at all is sufficient
                        run_setup(test_setup_py, [str('--name')])
                    except pkg_resources.VersionConflict:
                        self.fail('Installing setup.py requirements '
                                  'caused a VersionConflict')

                assert 'FAIL' not in stdout.getvalue()
                lines = stdout.readlines()
                assert len(lines) > 0
                assert lines[-1].strip() == 'test_pkg'
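# The test above relies on make_nspkg_sdist producing a distribution that
# declares a pkg_resources-style namespace package.  A minimal sketch of what
# such a fixture could look like (not necessarily the suite's actual helper;
# it assumes a make_sdist(dist_path, files) utility that packs the given
# (path, content) pairs into a gzipped sdist):
def make_nspkg_sdist_sketch(dist_path, distname, version):
    parts = distname.split('.')      # e.g. 'foo.bar' -> ['foo', 'bar']
    nspackage = parts[0]             # the namespace package: 'foo'
    packages = ['.'.join(parts[:i]) for i in range(1, len(parts) + 1)]

    setup_py = DALS("""\
        import setuptools
        setuptools.setup(
            name={name!r},
            version={version!r},
            packages={packages!r},
            namespace_packages=[{nspackage!r}],
        )
        """).format(name=distname, version=version,
                    packages=packages, nspackage=nspackage)

    # The namespace __init__.py contains only the namespace declaration.
    ns_init = "__import__('pkg_resources').declare_namespace(__name__)"
    files = [('setup.py', setup_py),
             (os.path.join(nspackage, '__init__.py'), ns_init)]
    for package in packages[1:]:
        files.append(
            (os.path.join(*(package.split('.') + ['__init__.py'])), ''))
    make_sdist(dist_path, files)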
def test_upgrade(tmpdir, capsys):
    # Run the testpackage fixture manually, since we use it multiple times in
    # this test to make different versions of _astropy_helpers_test_
    orig_dir = testpackage(tmpdir.mkdir('orig'))

    # Make a test package that uses _astropy_helpers_test_
    source = tmpdir.mkdir('source')
    dist_dir = source.mkdir('dists')
    orig_dir.copy(source.join('_astropy_helpers_test_'))

    with source.as_cwd():
        setup_py = TEST_SETUP_PY.format(args='auto_upgrade=True')
        source.join('setup.py').write(setup_py)

        # This will be used later to fake downloading the upgrade package
        source.join('setup.cfg').write(textwrap.dedent("""\
            [easy_install]
            find_links = {find_links}
        """.format(find_links=str(dist_dir))))

    # Make additional "upgrade" versions of the _astropy_helpers_test_
    # package--one of them is version 0.2 and the other is version 0.1.1.  The
    # auto-upgrade should ignore version 0.2 but use version 0.1.1.
    upgrade_dir_1 = testpackage(tmpdir.mkdir('upgrade_1'), version='0.2')
    upgrade_dir_2 = testpackage(tmpdir.mkdir('upgrade_2'), version='0.1.1')

    dists = []
    # For each upgrade package go ahead and build a source distribution of it
    # and copy that source distribution to a dist directory we'll use later to
    # simulate a 'download'
    for upgrade_dir in [upgrade_dir_1, upgrade_dir_2]:
        with silence():
            run_setup(str(upgrade_dir.join('setup.py')),
                      ['sdist', '--dist-dir=dist', '--formats=gztar'])
        dists.append(str(upgrade_dir.join('dist')))
        for dist_file in upgrade_dir.visit('*.tar.gz'):
            dist_file.copy(source.join('dists'))

    # Monkey with the PackageIndex in ah_bootstrap so that it is initialized
    # with the test upgrade packages, and so that it does not actually go out
    # to the internet to look for anything
    import ah_bootstrap

    class FakePackageIndex(PackageIndex):
        def __init__(self, *args, **kwargs):
            PackageIndex.__init__(self, *args, **kwargs)
            self.to_scan = dists

        def find_packages(self, requirement):
            # no-op
            pass

    ah_bootstrap.PackageIndex = FakePackageIndex

    try:
        with source.as_cwd():
            # Now run the source setup.py; this test is similar to
            # test_download_if_needed, but we explicitly check that the correct
            # *version* of _astropy_helpers_test_ was used
            run_setup('setup.py', [])

            stdout, stderr = capsys.readouterr()
            path = stdout.splitlines()[-1].strip()
            eggs = glob.glob('*.egg')
            assert eggs

            egg = source.join(eggs[0])
            assert os.path.isdir(str(egg))
            assert path == str(egg.join('_astropy_helpers_test_',
                                        '__init__.py'))
            assert 'astropy_helpers_test-0.1.1-' in str(egg)
    finally:
        ah_bootstrap.PackageIndex = PackageIndex
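# The "use 0.1.1 but ignore 0.2" expectation above amounts to restricting the
# auto-upgrade to the currently installed minor series.  A rough sketch of
# that kind of check with pkg_resources (the requirement string is purely
# illustrative and is not ah_bootstrap's actual logic):
from pkg_resources import Requirement

_compatible = Requirement.parse('astropy-helpers-test>=0.1,<0.2')
assert '0.1.1' in _compatible      # eligible upgrade within the 0.1.x series
assert '0.2' not in _compatible    # next minor series is excluded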
def test_no_cython_buildext(tmpdir):
    """
    Regression test for https://github.com/astropy/astropy-helpers/pull/35

    This tests the custom build_ext command installed by astropy_helpers when
    used with a project that has no Cython extensions (but does have one or
    more normal C extensions).
    """

    # In order for this test to test the correct code path we need to fool
    # setup_helpers into thinking we don't have Cython installed
    setup_helpers._module_state['have_cython'] = False

    test_pkg = tmpdir.mkdir('test_pkg')
    test_pkg.mkdir('_eva_').ensure('__init__.py')

    # TODO: It might later be worth making this particular test package into a
    # reusable fixture for other build_ext tests

    # A minimal C extension for testing
    test_pkg.join('_eva_').join('unit01.c').write(dedent("""\
        #include <Python.h>
        #ifndef PY3K
        #if PY_MAJOR_VERSION >= 3
        #define PY3K 1
        #else
        #define PY3K 0
        #endif
        #endif

        #if PY3K
        static struct PyModuleDef moduledef = {
            PyModuleDef_HEAD_INIT,
            "unit01",
            NULL,
            -1,
            NULL
        };
        PyMODINIT_FUNC
        PyInit_unit01(void) {
            return PyModule_Create(&moduledef);
        }
        #else
        PyMODINIT_FUNC
        initunit01(void) {
            Py_InitModule3("unit01", NULL, NULL);
        }
        #endif
    """))

    test_pkg.join('setup.py').write(dedent("""\
        from os.path import join
        from setuptools import setup, Extension
        from astropy_helpers.setup_helpers import register_commands

        NAME = '_eva_'
        VERSION = 0.1
        RELEASE = True

        cmdclassd = register_commands(NAME, VERSION, RELEASE)

        setup(
            name=NAME,
            version=VERSION,
            cmdclass=cmdclassd,
            ext_modules=[Extension('_eva_.unit01',
                                   [join('_eva_', 'unit01.c')])]
        )
    """))

    test_pkg.chdir()
    run_setup('setup.py', ['build_ext', '--inplace'])
    try:
        import _eva_.unit01
        dirname = os.path.abspath(os.path.dirname(_eva_.unit01.__file__))
        assert dirname == str(test_pkg.join('_eva_'))
    finally:
        for modname in ['_eva_', '_eva_.unit01']:
            try:
                del sys.modules[modname]
            except KeyError:
                pass
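# The test flips setup_helpers._module_state['have_cython'] to force the
# no-Cython code path.  The general pattern being exercised looks roughly like
# the sketch below (this is not astropy_helpers' actual implementation):
# detect Cython once, and only cythonize when it is available, so projects
# with plain C extensions still build.
try:
    from Cython.Build import cythonize
    _have_cython = True
except ImportError:
    _have_cython = False

def _maybe_cythonize(extensions):
    # With Cython installed, .pyx sources are translated to C first;
    # without it, the C extensions are passed to build_ext unchanged.
    return cythonize(extensions) if _have_cython else extensions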
Example #49
)
from fairseq.models import MODEL_REGISTRY  # noqa


# torch.hub doesn't build Cython components, so if they are not found then try
# to build them here
try:
    import fairseq.data.token_block_utils_fast  # noqa
except ImportError:
    try:
        import cython  # noqa
        import os
        from setuptools import sandbox

        sandbox.run_setup(
            os.path.join(os.path.dirname(__file__), "setup.py"),
            ["build_ext", "--inplace"],
        )
    except ImportError:
        print(
            "Unable to build Cython components. Please make sure Cython is "
            "installed if the torch.hub model you are loading depends on it."
        )


# automatically expose models defined in FairseqModel::hub_models
for _model_type, _cls in MODEL_REGISTRY.items():
    for model_name in _cls.hub_models().keys():
        globals()[model_name] = functools.partial(
            _cls.from_pretrained,
            model_name,
        )
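# Usage sketch for the hub integration above: torch.hub fetches this
# hubconf.py and exposes each name registered via hub_models() as a callable.
# The model key, tokenizer and bpe arguments below are illustrative; check
# hub_models() for the names a given fairseq release actually registers.
import torch

en2de = torch.hub.load('pytorch/fairseq', 'transformer.wmt19.en-de.single_model',
                       tokenizer='moses', bpe='fastbpe')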
args = parser.parse_args()
print(args)

if not isdir(args.dirname):
    print('Please specify an existing directory with pipelines! ', args.dirname,
          ' directory does not exist.')
    print(
        'Usage: ./run_pipeline.py directory_with_pipelines --pipeline-name my_pipeline'
    )
    print('--pipeline-name parameter is optional')
    sys.exit(-100)

if args.pipeline_name:
    if not isdir(args.dirname + '/' + args.pipeline_name):
        print('Please specify an existing pipeline name with --pipeline-name. ',
              args.dirname + '/' + args.pipeline_name,
              ' directory does not exist.')
        print(
            'Usage: ./run_pipeline.py directory_with_pipelines --pipeline-name my_pipeline'
        )
        print('--pipeline-name parameter is optional')
        sys.exit(-100)

import mlflow
mlflow.set_tracking_uri("databricks")

from setuptools import sandbox
sandbox.run_setup('setup.py', ['clean', 'bdist_wheel'])

from databrickslabs_mlflowdepl import dev_cicd_pipeline
dev_cicd_pipeline.main(args.dirname, args.pipeline_name)
Example #51
else:
    datapath = os.environ.get('TFIO_DATAPATH')

if datapath is not None:
    for rootname, _, filenames in os.walk(
            os.path.join(datapath, "tensorflow_io")):
        if (not fnmatch.fnmatch(rootname, "*test*")
                and not fnmatch.fnmatch(rootname, "*runfiles*")):
            for filename in fnmatch.filter(filenames, "*.so"):
                src = os.path.join(rootname, filename)
                dst = os.path.join(
                    rootpath,
                    os.path.relpath(os.path.join(rootname, filename),
                                    datapath))
                print("setup.py - copy {} to {}".format(src, dst))
                shutil.copyfile(src, dst)

print("setup.py - run sandbox.run_setup {} {}".format(
    os.path.join(rootpath, "setup.py"), sys.argv[1:]))
sandbox.run_setup(os.path.join(rootpath, "setup.py"), sys.argv[1:])

if not os.path.exists("dist"):
    os.makedirs("dist")
for f in os.listdir(os.path.join(rootpath, "dist")):
    print("setup.py - copy {} to {}".format(os.path.join(rootpath, "dist", f),
                                            os.path.join("dist", f)))
    shutil.copyfile(os.path.join(rootpath, "dist", f), os.path.join("dist", f))
print("setup.py - remove {}".format(rootpath))
shutil.rmtree(rootpath)
print("setup.py - complete")
            with open(init_file, "w") as outfile:
                outfile.write("".join(package_init))
            reload(pyrsched.server)
            setup_version, git_version, pypi_version = get_versions()

    if pypi_version < setup_version:
        print("The package on PyPI needs to be updated.")
        r = input("Build Package and update on PyPI? [Yn] ") or "Y"
        if r.upper() == "Y":
            # clear dist directory
            dist_path = Path("dist").resolve()
            for f in dist_path.iterdir():
                f.unlink()

            # build package
            from setuptools.sandbox import run_setup
            run_setup("setup.py", ["sdist", "bdist_wheel"])

            # upload package
            from twine import cli
            from twine import exceptions

            try:
                twine_res = cli.dispatch(["upload", "dist/*"])
                # only report the response when the upload did not raise
                print(f"Twine response: {twine_res}")
            except (exceptions.TwineException, requests.HTTPError) as exc:
                print("{}: {}".format(exc.__class__.__name__, exc.args[0]))

            setup_version, git_version, pypi_version = get_versions()

    print("all done...")
Example #53
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright

# The unique identifier of the text. This can be an ISBN number
# or the project homepage.
#
# epub_identifier = ''

# A unique identification for the text.
#
# epub_uid = ''

# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']

# custom
extensions += ['sphinx.ext.todo']
todo_include_todos = True
extensions += ['sphinx.ext.autosummary']
extensions += ['sphinx.ext.imgmath']
numpydoc_show_class_members = False
html_theme = "sphinx_rtd_theme"
import sys, os
sys.path.insert(0, os.path.abspath('../../'))
from setuptools import sandbox
sandbox.run_setup(os.path.abspath('../../setup.py'), ['build_ext', '-i'])
autoclass_content = 'both'
Example #54
 def test_bdist_egg_available_on_distutils_pkg(self, distutils_package):
     run_setup('setup.py', ['bdist_egg'])
Example #55
def mk_api_pkg(
    openapi_spec: dict = None,
    openapi_url: str = None,
    pkg_name: str = None,
    pkg_version: str = None,
    pkg_description: str = None,
    pkg_author: str = None,
    pkg_license: str = None,
):
    """
    Build a pip installable package containing the functions to consume a running
    webservice application executed with py2http.

    :param openapi_spec: The openapi specification for the webservice.
    :type openapi_spec: dict, optional
    :param openapi_url: The url to fetch the openapi specification for the webservice.
    :type openapi_url: str, optional
    :param pkg_name: The name of the package. If no name is provided, defaults
        to 'apipkg{timestamp}'.
    :type pkg_name: str, optional
    :param pkg_version: The version number of the package. If no version is
        provided, defaults to '1.0.0'.
    :type pkg_version: str, optional
    :param pkg_description: The description of the package.
    :type pkg_description: str, optional
    :param pkg_author: The author of the package. If no author is provided,
        defaults to 'API Package Maker'.
    :type pkg_author: str, optional
    :param pkg_license: The license for the package. If no license is provided,
        defaults to 'Apache'.
    :type pkg_license: str, optional
    """
    def mk_str_element(value: str):
        return value if value is None else f"'{value}'"

    def mk_pkg_name():
        dt = datetime.now(timezone.utc)
        utc_time = dt.replace(tzinfo=timezone.utc)
        utc_timestamp = int(utc_time.timestamp())
        return f'apipkg{utc_timestamp}'

    def create_file(filepath, content):
        with open(filepath, 'w') as file:
            print(content, file=file)

    tempdir = tempfile.mkdtemp()
    try:
        api = HttpClient(openapi_spec=openapi_spec, url=openapi_url)
        paths_spec = api.openapi_spec.get('paths')
        if not paths_spec:
            raise RuntimeError('The API is empty')
        func_names = [path[1:] for path in paths_spec]
        funcs_code = [
            FUNC_TPL.format(func_name=func_name) for func_name in func_names
        ]
        funcs_file_code = FUNCS_FILE_TPL.format(
            openapi_spec=mk_str_element(openapi_spec),
            openapi_url=mk_str_element(openapi_url),
            funcs=''.join(funcs_code),
        )
        init_file_code = INIT_FILE_TPL.format(funcs=', '.join(func_names))
        pkg_name = pkg_name or mk_pkg_name()
        module_dir = os.path.join(tempdir, pkg_name)
        os.makedirs(module_dir)
        create_file(filepath=os.path.join(module_dir, '__init__.py'),
                    content=init_file_code)
        create_file(filepath=os.path.join(module_dir, 'funcs.py'),
                    content=funcs_file_code)
        server_url = api.openapi_spec['servers'][0]['url']
        pkg_version = pkg_version or '1.0.0'
        setup_cfg_content = SETUP_CFG_TPL.format(
            name=pkg_name,
            version=pkg_version,
            description=pkg_description or
            f'A client API to consume the webservices exposed at {server_url}',
            author=pkg_author or 'API Package Maker',
            license=pkg_license or 'Apache',
        )
        create_file(filepath=os.path.join(tempdir, 'setup.cfg'),
                    content=setup_cfg_content)
        create_file(filepath=os.path.join(tempdir, 'setup.py'),
                    content=SETUP_PY)
        os.chdir(tempdir)
        sandbox.run_setup('setup.py', ['sdist'])
        pkg_filename = f'{pkg_name}-{pkg_version}.tar.gz'
        if not os.path.exists(OUTPUT_DIR):
            os.makedirs(OUTPUT_DIR)
        shutil.copy(src=os.path.join(tempdir, 'dist', pkg_filename),
                    dst=OUTPUT_DIR)

        return os.path.join(OUTPUT_DIR, pkg_filename)

    finally:
        shutil.rmtree(tempdir)
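# Hypothetical usage of mk_api_pkg: point it at a running py2http service and
# get back the path of an installable sdist.  The URL, package name and
# printed filename below are assumptions for illustration only.
sdist_path = mk_api_pkg(
    openapi_url='http://localhost:3030/openapi',
    pkg_name='myservice_client',
    pkg_version='0.1.0',
)
print(sdist_path)  # e.g. <OUTPUT_DIR>/myservice_client-0.1.0.tar.gz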