Example #1
def install_neuron(version):
    if not version:
        version = '7.6'

    nrnpath = os.path.join(os.environ['HOME'], 'neuron')

    inform('Installing NEURON %s into %s' % (version, nrnpath), indent=1)
    os.mkdir(nrnpath)
    with working_dir(nrnpath):
        print(
            co([
                'wget',
                'https://www.neuron.yale.edu/ftp/neuron/versions/v%s/nrn-%s.tar.gz'
                % (version, version)
            ]))
        print(co(['tar', 'xzvf', 'nrn-%s.tar.gz' % version]))
        print(co(['mv', 'nrn-%s' % version, 'nrn']))
        os.chdir('nrn')

        path = os.getcwd()
        pyexec = sys.executable
        co([
            "./configure --prefix=%s --without-iv --with-nrnpython=%s" %
            (path, pyexec)
        ],
           shell=True)
        print(co(['make', '-j4']))
        print(co(['make', 'install']))

        os.chdir('src/nrnpython')
        run_setup('./setup.py', ['install'])
Example #2
def test_imports(gui=False):
  import sys
  from distutils.core import run_setup
  
  critical = False
  
  try:
    import numpy
  except ImportError as err:
    critical = True
    warn('Critical Python module "numpy" is not installed or accessible')

  try:
    import cython
  except ImportError as err:
    critical = True
    warn('Critical Python module "cython" is not installed or accessible')
  
  try:
    import dyn_util
  except ImportError as err:
    warn('Utility C/Cython code not compiled. Attempting to compile now...')    
    run_setup('setup_cython.py', ['build_ext', '--inplace'])
    
    try:
      import dyn_util
    except ImportError as err:
      critical = True
      warn('Utility C/Cython code compilation/import failed')   
    
  if critical:
    warn('NucDynamics cannot proceed because critical Python modules are not available', 'ABORT')
    sys.exit(0)
Example #3
def upload_app(first_time: bool = False, build_only: bool = False):
    run_setup('setup.py', script_args=['sdist', 'bdist_wheel'])
    if build_only:
        return
    if first_time:
        twine('register')
    twine('upload')
Example #4
def build(module_old,
          build_dir="build",
          temp_dir="temp",
          exclude_file=["__init__.py"]):
    try:
        os.makedirs(build_dir)
    except:
        pass

    all_pys = list()

    def add_file(f):
        all_pys.append(f)

    module_name = os.path.split(module_old)[-1]
    module_path = os.path.join(build_dir, module_name)
    shutil.rmtree(module_path, ignore_errors=True)
    shutil.copytree(module_old, module_path)
    recur_files(module_path, add_file, 'py', exclude=exclude_file)

    setup_script = "from distutils.core import setup\n" \
                   "from Cython.Build import cythonize\n" \
                   "setup(ext_modules=cythonize({}, nthreads=8))".format(all_pys)
    setup_file = os.path.join(module_path, 'setup.py')
    with open(setup_file, 'w') as f:
        f.write(setup_script)
    run_setup(
        setup_file,
        ['build_ext', '--build-lib', build_dir, '--build-temp', temp_dir])
    # subprocess.call(['python', setup_file, 'build_ext', '--build-lib', build_dir, '--build-temp', temp_dir])
    recur_files(module_path,
                os.remove, ['c', 'py', 'pyc'],
                exclude=exclude_file)
    # shutil.rmtree(temp_dir, ignore_errors=True)
    pass
Example #5
def install_neuron(get_latest=False):
    nrnpath = os.path.join(os.environ['HOME'], 'neuron')
    os.mkdir(nrnpath)
    with working_dir(nrnpath):
        if get_latest:
            print co(
                ['hg', 'clone', 'http://www.neuron.yale.edu/hg/neuron/nrn'])
            os.chdir('nrn')
            print co(['./build.sh'])
        else:
            print co([
                'wget',
                'http://www.neuron.yale.edu/ftp/neuron/versions/v7.3/nrn-7.3.tar.gz'
            ])
            print co(['tar', 'xzvf', 'nrn-7.3.tar.gz'])
            print co(['mv', 'nrn-7.3', 'nrn'])
            os.chdir('nrn')

        path = os.getcwd()
        pyexec = sys.executable
        co([
            "./configure --prefix=%s --without-iv --with-nrnpython=%s" %
            (path, pyexec)
        ],
           shell=True)
        print co(['make'])
        print co(['make', 'install'])

        os.chdir('src/nrnpython')
        run_setup('./setup.py', ['install'])
Example #6
    def run_f2py_compilation(self):
        """Compile fortran module using f2py and then load it

        Raises (on error):
        RuntimeError

        First check the compilation wrapper exists and if not create it. Then run
        the compilation within a try-except block, printing the output and any errors.
        Finally load the module.
        """

        if not os.path.isfile(self.wrapper_path):
            self.create_wrapper()
        path_to_build_dir = os.path.join(build_path, self.fortran_module_name,
                                         "temp")
        if os.path.isdir(path_to_build_dir):
            shutil.rmtree(path_to_build_dir)
        os.makedirs(path_to_build_dir)
        setup_args = [
            "build_ext", "--build-lib={0}".format(shared_object_path),
            "--build-temp={0}".format(
                os.path.join(build_path, self.fortran_module_name,
                             "temp")), "--build-path={0}".format(build_path),
            "--module-name={0}".format(self.fortran_module_name),
            "--sources={0}".format(str(self.sources)),
            "--include-path={0}".format(str(self.include_path)),
            "--objects={0}".format(str(self.additional_fortran_files))
        ]
        if using_mpi():
            setup_args.append("--use-mpi")
        distutils_core.run_setup(self.wrapper_path,
                                 setup_args,
                                 stop_after="run")
        self.load_module()
Example #7
def _generation_sub_repositories_wheels(git_info, clone_repositories_dir,
                                        wheelhouse_dir):
    def get_existing_wheel_names():
        def extract_wheel_name(filepath):
            filename = os.path.basename(filepath)
            wheel_name_search = re.search(pattern, filename)
            if not wheel_name_search:
                raise RuntimeError(
                    f'Failed to extract the wheel name from "{filename}"')
            return wheel_name_search.group(1)

        pattern = r'(.+)-[0-9]*\.[0-9]*\.[0-9]*.*\.whl'
        filepaths = glob.glob(f'{wheelhouse_dir}/*.whl')
        return [extract_wheel_name(filepath) for filepath in filepaths]

    for dep_git_info in git_info:
        dep_name = dep_git_info['name']
        existing_wheel_names = get_existing_wheel_names()
        if not dep_name in existing_wheel_names:
            target_dir = os.path.join(clone_repositories_dir, dep_name)
            clone_repository(
                url=dep_git_info['url'],
                branch=dep_git_info['branch'],
                target_dir=target_dir,
                quiet=True,
            )
            _generate_repository_wheels(target_dir, clone_repositories_dir,
                                        wheelhouse_dir)
            os.chdir(target_dir)
            run_setup('setup.py',
                      ['bdist_wheel', f'--dist-dir={wheelhouse_dir}'])
Example #8
    def run(self):

        from Cython.Build import cythonize
        self.extensions = cythonize(self.extensions)

        _build_ext.run(self)

        run_setup(os.path.join(os.getcwd(), "setup.py"), ['build_py'] + extra_args)
Example #9
def install():
    """
    Run Installer
    """
    build()
    uninstall()
    run_setup(script_name=os.path.join(ROOT, 'setup.py'), script_args=['install'])
    _pyenv_rehash()
Example #10
def createPackage(wheel):
    """
    Invoke setup.py to create a package
    """
    if wheel:
        script_args = ['bdist_wheel', '--universal']
    else:
        script_args = ['sdist']
    run_setup('setup.py', script_args=script_args)
Example #11
def createPackage(wheel):
    """
    Invoke setup.py to create a package
    """
    if wheel:
        script_args = ['bdist_wheel', '--universal']
    else:
        script_args = ['sdist']
    run_setup('setup.py', script_args=script_args)
Example #12
    def run(self, ini_config, ini_section, locales):
        if not locales:
            cp = ConfigParser()
            cp.read(ini_config)
            locales = config_list(cp.get(ini_section, 'available_languages'))

        package_name = run_setup('setup.py', stop_after='init').get_name()
        locale_dir = os.path.join(package_name, 'locale')
        pot_filename = os.path.join(locale_dir, 'messages.pot')

        # delete existing .mo files
        for filename in glob.glob(
                os.path.join(locale_dir, '*/LC_MESSAGES/*.mo')):
            os.remove(filename)

        # generate new .pot file
        run_setup('setup.py', [
            'extract_messages', '-o', pot_filename])

        # initialize new locales' .po files
        for locale in locales:
            po_filename = os.path.join(
                locale_dir, '%s/LC_MESSAGES/messages.po' % locale)
            if os.path.exists(po_filename):
                continue
            run_setup('setup.py', [
                'init_catalog', '-i', pot_filename, '-d', locale_dir,
                '-l', locale])

        # update and compile .po files
        run_setup('setup.py', [
            'update_catalog', '-i', pot_filename, '-d', locale_dir])
        run_setup('setup.py', [
            'compile_catalog', '-d', locale_dir])
Example #13
    def run(self, ini_config, ini_section, locales):
        if not locales:
            cp = ConfigParser()
            cp.read(ini_config)
            locales = config_list(cp.get(ini_section, 'available_languages'))

        package_name = run_setup('setup.py', stop_after='init').get_name()
        locale_dir = os.path.join(package_name, 'locale')
        pot_filename = os.path.join(locale_dir, 'messages.pot')

        # delete existing .mo files
        for filename in glob.glob(
                os.path.join(locale_dir, '*/LC_MESSAGES/*.mo')):
            os.remove(filename)

        # generate new .pot file
        run_setup('setup.py', ['extract_messages', '-o', pot_filename])

        # initialize new locales' .po files
        for locale in locales:
            po_filename = os.path.join(locale_dir,
                                       '%s/LC_MESSAGES/messages.po' % locale)
            if os.path.exists(po_filename):
                continue
            run_setup('setup.py', [
                'init_catalog', '-i', pot_filename, '-d', locale_dir, '-l',
                locale
            ])

        # update and compile .po files
        run_setup('setup.py',
                  ['update_catalog', '-i', pot_filename, '-d', locale_dir])
        run_setup('setup.py', ['compile_catalog', '-d', locale_dir])
Example #14
    def __run_setup_py(
        path_setup: str, data_dist: dict, run_py=False, deps_installed=None
    ):
        """Method responsible to run the setup.py

        :param path_setup: full path to the setup.py
        :param data_dist: metadata
        :param run_py: If it should run the setup.py with run_py, otherwise it will run
        invoking the distutils directly
        """
        deps_installed = deps_installed if deps_installed else []
        original_path = deepcopy(sys.path)
        pip_dir = mkdtemp(prefix="pip-dir-")
        if not os.path.exists(pip_dir):
            os.mkdir(pip_dir)
        if os.path.dirname(path_setup) not in sys.path:
            sys.path.append(os.path.dirname(path_setup))
            sys.path.append(pip_dir)
        PyPi._install_deps_if_necessary(path_setup, data_dist, pip_dir)
        try:
            if run_py:
                import runpy

                data_dist["run_py"] = True
                runpy.run_path(path_setup, run_name="__main__")
            else:
                core.run_setup(
                    path_setup, script_args=["install", f"--target={pip_dir}"]
                )
        except ModuleNotFoundError as err:
            log.debug(
                f"When executing setup.py did not find the module: {err.name}."
                f" Exception: {err}"
            )
            dep_install = err.name
            if dep_install in deps_installed:
                dep_install = dep_install.split(".")[0]
            if dep_install not in deps_installed:
                deps_installed.append(dep_install)
                PyPi._pip_install_dep(data_dist, dep_install, pip_dir)
                PyPi.__run_setup_py(
                    path_setup, data_dist, run_py, deps_installed=deps_installed
                )
        except Exception as err:
            log.debug(f"Exception when executing setup.py as script: {err}")
        data_dist.update(
            PyPi._merge_sdist_metadata(
                data_dist, PyPi._get_setup_cfg(os.path.dirname(str(path_setup)))
            )
        )
        log.debug(f"Data recovered from setup.py: {data_dist}")
        if os.path.exists(pip_dir):
            shutil.rmtree(pip_dir)
        sys.path = original_path
Example #15
    def preprocess(self, nb, resources):
        self.first_cell_code_index = -1
        self.hidden_block_lines = []

        nb, resources = super(CompileTestFunctions,
                              self).preprocess(nb, resources)

        self.assignment_id = resources['nbgrader']['assignment']
        self.notebook_id = resources['nbgrader']['notebook']

        if self.hidden_block_lines:
            orig_dir = os.getcwd()

            # make && compile library
            tmp_path = tempfile.mkdtemp()
            os.chdir(tmp_path)

            filename = '{}.py'.format(self.notebook_id)

            with open(filename, 'w') as fp:
                fp.write('\n'.join(self.hidden_block_lines))

            with open('setup.py', 'w') as fp:
                fp.write(
                    SETUP_FILE_TEMPLATE.format(ext_name=self.notebook_id,
                                               filename=filename))

            run_setup('setup.py', ['build_ext', '--inplace'])
            os.chdir(orig_dir)

            # make package
            library_path = os.path.join('release', self.assignment_id, 'tests')
            os.makedirs(library_path, exist_ok=True)

            package_init_file = os.path.join(library_path, '__init__.py')
            if not os.path.exists(package_init_file):
                open(package_init_file, 'w').close()

            compiled_filename = '{}.so'.format(self.notebook_id)

            copyfile(
                os.path.join(tmp_path, compiled_filename),
                os.path.join(library_path, compiled_filename),
            )

            # cleanup
            rmtree(tmp_path, ignore_errors=True)

            # add imports to first cell
            cell = nb.cells[self.first_cell_code_index]
            cell.source = "from tests.{} import *\n".format(
                self.notebook_id) + cell.source

        return nb, resources
Example #16
 def package(self):
     try:
         work_dir = str(Path.cwd())
         os.chdir(self.ai_job.package_src)
         run_setup(
             'setup.py',
             script_args=['sdist', '--dist-dir=dist', '--format=gztar'])
         os.chdir(work_dir)
         return True
     except Exception as e:
         print(str(e))
         return False
Example #17
def execute_setup(**kwargs):
    logging.info('Executing Setup')
    config = kwargs.get('config')
    setup_script = config.get('script')
    if setup_script:
        config['script'] = False
    else:
        setup_script = DEFAULT_SETUP_SCRIPT
    generator = _SetupArgumentGenerator()
    args = generator.generate_from(config)
    dcore.run_setup(setup_script, args)
    return 0
Example #18
    def run(self):

        from Cython.Build import cythonize

        compiler_directives = {}
        if linetrace:
            compiler_directives['linetrace'] = True

        self.extensions = cythonize(self.extensions, compiler_directives=compiler_directives)

        _build_ext.run(self)

        run_setup(os.path.join(os.getcwd(), "setup.py"),
                  ['build_py'] + extra_args)
Example #19
def _build_mechanism(mechanism, mechanism_style):

    species_file = os.path.join(mech_dir, mechanism + '.spc')
    reactions_file = os.path.join(mech_dir, mechanism + '.eqn')
    extra_rate_file = os.path.join(mech_dir, mechanism + '.rate')
    if not os.path.isfile(extra_rate_file):
        extra_rate_file = None

    print('Building {}'.format(mechgen.derivative_file))
    mechgen.generate_mechanism(mechanism_style=mechanism_style,
                               species_file=species_file,
                               reactions_file=reactions_file,
                               additional_rates_file=extra_rate_file)

    run_setup(os.path.join(_mydir, 'setup.py'), ['build_ext', '--inplace'])
Example #20
def test_imports(gui=False):
    import sys
    from distutils.core import run_setup

    critical = False

    try:
        import numpy
    except ImportError as err:
        critical = True
        warn('Critical Python module "numpy" is not installed or accessible')

    try:
        import cython
    except ImportError as err:
        critical = True
        warn('Critical Python module "cython" is not installed or accessible')

    try:
        from .. import dyn_util
    except ImportError as err:
        import os
        cwd = os.getcwd()
        try:
            fdir = os.path.dirname(os.path.normpath(__file__))
            print(fdir)
            os.chdir(fdir)
            warn(
                'Utility C/Cython code not compiled. Attempting to compile now...'
            )
            run_setup('setup_cython.py', ['build_ext', '--inplace'])
        finally:
            os.chdir(cwd)

        try:
            import dyn_util
            warn('NucDynamics C/Cython code compiled. Please re-run command.')
            sys.exit(0)

        except ImportError as err:
            critical = True
            warn('Utility C/Cython code compilation/import failed')

    if critical:
        warn(
            'NucDynamics cannot proceed because critical Python modules are not available',
            'ABORT')
        sys.exit(0)
Example #21
def build_egg():
    from distutils import core

    if os.path.isdir("dist"):
        shutil.rmtree("dist")
    dist = core.run_setup("setup.py", script_args=["-q", "bdist_egg"])
    return os.path.abspath(dist.dist_files[0][2])
Example #22
def build_egg():
    from distutils import core

    if os.path.isdir("dist"):
        shutil.rmtree("dist")
    dist = core.run_setup("setup.py", script_args=["-q", "bdist_egg"])
    return os.path.abspath(dist.dist_files[0][2])
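Both build_egg variants above index into dist.dist_files; as a point of reference, here is a minimal hedged sketch of that structure (assuming a setup.py in the current directory that produces at least one artifact):

from distutils.core import run_setup

# Each entry in dist_files is a (command, python_version, path) tuple,
# e.g. ("sdist", "", "dist/mypkg-1.0.tar.gz"); the snippets above rely
# on [0][2] being the path of the first artifact built.
dist = run_setup("setup.py", script_args=["-q", "sdist"])
for command, pyversion, path in dist.dist_files:
    print(command, pyversion, path)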
Example #23
def build_sdist(src, distdir):
    from distutils.core import run_setup
    # save argv. distutils run_setup changes argv and fails at restoring properly
    save_argv = list(sys.argv)
    # block any other thread from executing since we're changing cwd and sys.argv
    with sync_lock, tee.StdoutTee('build_sdist.out', 'w', 2), \
         tee.StderrTee('build_sdist.err', 'w', 2):
        # make sure no other processing is executing while we change the working directory for setup.py
        cwd = os.getcwd()
        sys.stdout.errors = None
        sys.stderr.errors = None
        try:
            if not src.endswith('setup.py'):
                setup_path = os.path.join(src, 'setup.py')
            else:
                setup_path = src
                src = os.path.dirname(setup_path)
            os.chdir(src)
            sdist = run_setup(setup_path,
                              script_args=['sdist', '--dist-dir', distdir])
        finally:
            # restore cwd
            os.chdir(cwd)
            # restore sys argv
            for i, v in enumerate(save_argv):
                sys.argv[i] = v
            if len(sys.argv) > len(save_argv):
                del sys.argv[len(save_argv):]
    return sdist
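build_sdist above works around run_setup mutating sys.argv and depending on the working directory; a minimal hedged sketch of the same save/restore idea packaged as a reusable context manager (the helper name is illustrative, not from the original project):

import contextlib
import os
import sys

@contextlib.contextmanager
def preserved_argv_and_cwd():
    # Snapshot argv and cwd so run_setup's side effects don't leak out,
    # even if the build raises.
    saved_argv = list(sys.argv)
    saved_cwd = os.getcwd()
    try:
        yield
    finally:
        os.chdir(saved_cwd)
        sys.argv[:] = saved_argv

# Usage (hypothetical paths):
# with preserved_argv_and_cwd():
#     os.chdir(src)
#     run_setup('setup.py', script_args=['sdist', '--dist-dir', distdir])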
Example #24
def upload_package(repository, versions=None, skip_empty_versions=False):
    package = run_setup('setup.py', stop_after='config')
    package_meta = package.metadata
    logging.info('Found package %s.', package_meta.get_name())

    if not versions:
        versions = [package_meta.get_version()]

    # for n in dir(package):
    #     val = getattr(package, n)
    #     if n.startswith('get_'):
    #         try:
    #             val = val()
    #         except:
    #             val = getattr(package, n)
    #     print(f"{Fore.GREEN}{n}{Fore.LIGHTGREEN_EX}: {Fore.BLACK}{val}{Style.RESET_ALL}")

    for version in versions:
        logging.info('Processing version %s.', version, extra={'indent': 1})
        package_meta.version = version
        try:
            repository.upload_files(package_meta.get_name(),
                                    list(discover_dists(package_meta)))
        except NoDistFilesFoundError as e:
            if skip_empty_versions:
                logger.warning(f'{e.args[0]} Skipping...', *e.args[1:])
            else:
                raise
Example #25
def parse_setup_file(input_file):
    output = []
    cur_dir = os.getcwd()
    setup_dir = os.path.abspath(os.path.dirname(input_file))
    sys.path.append(setup_dir)
    os.chdir(setup_dir)
    try:
        setup = run_setup(input_file, stop_after="config")
    except Exception as e:
        logger.error(f"run_setup: {e}")
        return []

    reqs_var = ["install_requires", "setup_requires", "extras_require"]
    for v in reqs_var:
        reqs = getattr(setup, v)
        if isinstance(reqs, list):
            for i in pkg_resources.parse_requirements(reqs):
                output.append(i.project_name)

        elif isinstance(reqs, dict):
            for i in pkg_resources.parse_requirements(
                {v
                 for req in reqs.values() for v in req}):
                output.append(i.project_name)
    os.chdir(cur_dir)
    return output
Example #26
 def _mksetup(self):
     with open(self.setup_name, 'w') as s, open(self.cfg_name, 'w') as c:
         s.write(self.setup_contents)
         c.write(self.cfg_contents)
     self.to_clean.add(self.setup_name)
     self.to_clean.add(self.cfg_name)
     self.setup_obj = run_setup(self.setup_name)
Example #27
def list_appl_paths(app_name=None, appcmd_exe=None):
    appcmd_exe = get_appcmd_exe(appcmd_exe)
    if appcmd_exe is None:
        return
    cmd = [
        appcmd_exe, 'list', 'config', '/section:system.applicationHost/sites',
        '/xml'
    ]
    logger.info(
        ('Querying appcmd.exe for '
         'sites/site/application/virtualDirectory/@physicalPath:\n{0}').format(
             ' '.join(cmd)))
    sites_output = subprocess.check_output(cmd)
    sites_dom = minidom.parseString(sites_output)
    cwd = os.getcwd()
    # Work backward through the list, most recent sites are last
    for site in reversed(sites_dom.getElementsByTagName('site')):
        for app in reversed(site.getElementsByTagName('application')):
            for vdir in app.getElementsByTagName('virtualDirectory'):
                path = os.path.expandvars(vdir.getAttribute('physicalPath'))
                if app_name:
                    if not os.path.exists(os.path.join(path, 'setup.py')):
                        continue
                    try:
                        os.chdir(path)
                        dist = core.run_setup('setup.py',
                                              stop_after='commandline')
                    finally:
                        os.chdir(cwd)
                    dist_name = dist.get_name()
                    if app_name != dist_name:
                        # Not an instance of this app
                        continue
                yield path
Example #28
def egg_info(files: Dict[str, str]) -> Tuple[Message, Distribution]:
    # TODO consider
    # https://docs.python.org/3/distutils/apiref.html#distutils.core.run_setup
    # and whether that gives a Distribution that knows setuptools-only options
    with tempfile.TemporaryDirectory() as d:
        for relname, contents in files.items():
            Path(d, relname).parent.mkdir(exist_ok=True, parents=True)
            Path(d, relname).write_text(contents)

        try:
            cwd = os.getcwd()
            stdout = sys.stdout

            os.chdir(d)
            sys.stdout = io.StringIO()
            dist = run_setup(f"setup.py", ["egg_info"])
        finally:
            os.chdir(cwd)
            sys.stdout = stdout

        sources = list(Path(d).rglob("PKG-INFO"))
        assert len(sources) == 1

        with open(sources[0]) as f:
            parser = email.parser.Parser()
            info = parser.parse(f)
        reader = SetuptoolsReader(Path(d))
        dist = reader.get_metadata()
        return info, dist
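The TODO at the top of egg_info asks whether run_setup returns a Distribution that knows about setuptools-only options; a minimal hedged probe (assuming the setup.py in question calls setuptools.setup):

from distutils.core import run_setup

# Parse setup.py without running any commands.
dist = run_setup("setup.py", stop_after="init")

# When setup.py uses setuptools.setup(), the returned object is a
# setuptools Distribution and carries extras such as install_requires;
# with a plain distutils setup() these attributes are simply absent.
print(type(dist))
print(getattr(dist, "install_requires", None))
print(getattr(dist, "extras_require", None))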
Example #29
    def annotate(self, values: Dict[str, str]):
        """
        Extract some stuff from setup.py, if present.

        If setup.py is present, we'll add the following keys to `values`:

        * `name`: the output of `python setup.py name`
        * `version`: the output of `python setup.py version`

        """
        super().annotate(values)
        setup_py = pathlib.Path.cwd() / 'setup.py'
        if setup_py.exists():
            # Extract some stuff from python itself
            python_setup = run_setup(str(setup_py))
            values['name'] = python_setup.get_name()
            values['version'] = python_setup.get_version()
            return

        # No setup.py; let's try Makefile
        makefile = pathlib.Path.cwd() / 'Makefile'
        if makefile.exists():
            values['name'] = subprocess.check_output(
                ['make', 'image_name']).decode('utf8').strip()
            values['version'] = subprocess.check_output(
                ['make', 'version']).decode('utf8').strip()
            return
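The docstring above describes name and version as the output of python setup.py name/version; a minimal hedged sketch of obtaining the same values via a subprocess instead of run_setup (the actual distutils display options are --name and --version):

import subprocess
import sys

# Equivalent of the docstring's `python setup.py name` / `version`,
# spelled with the real distutils flags.
name = subprocess.check_output(
    [sys.executable, "setup.py", "--name"]).decode().strip()
version = subprocess.check_output(
    [sys.executable, "setup.py", "--version"]).decode().strip()
print(name, version)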
Example #30
 def run(self):
     for path in self.msdeploy_bdists:
         distribution = core.run_setup('setup.py', stop_after='commandline')
         distribution.msdeploy_app_name = get_app_name(path)
         self.delete_installer_cache(distribution)
         self.delete_stamp_files(distribution)
     self.delete_feed_cache()
Example #31
 def __downloadRequirements(self, prebuiltPackages):
     packages = []
     with TempDirectory(None, False) as tempDir:
         cmd = DownloadCommand()
         options, args = cmd.parse_args([
             '--no-binary', ':all:', '--no-clean', '-b', tempDir.path,
             '--dest', tempDir.path, '-r', 'requirements.txt'
         ])
         requirement_set = cmd.run(options, args)
         for req in requirement_set.successfully_downloaded:
             dist = None
             if req.req.name in prebuiltPackages:
                 packages.insert(0, prebuiltPackages[req.req.name].location)
                 continue
             if req.req.name in self.skip_dependencies:
                 logging.info("Do not include dependency %s", req.req.name)
                 continue
             with chdir(req.source_dir):
                 # Save sys.path
                 sysPath = sys.path[:]
                 sys.path.append(req.source_dir)
                 dist = run_setup('setup.py', [
                     'bdist_egg', '--exclude-source-files', '--dist-dir',
                     self.packageDir
                 ])
                 # Restore
                 sys.path = sysPath
             if len(dist.dist_files) == 0:
                 raise Exception(
                     'Requirement %s does not provide any distribution files'
                     % req.req.name)
             for distfile in dist.dist_files:
                 packages.insert(0, distfile[2])
     return packages
Example #32
def generate_dist(dist_dir=None, script_name='setup.py'):

    if dist_dir:
        current_wd = os.getcwd()
        os.chdir(dist_dir)

    try:

        with tempfile.TemporaryDirectory(prefix='ap_dist') as tmp_dir,\
             StringIO() as pkg_info:

            with patch('sys.stdout', new_callable=StringIO) as mock_stdout:
                dist = run_setup(script_name, ['sdist', '-d', tmp_dir,
                                               'bdist_wheel', '-d', tmp_dir])

            local_locator = DirectoryLocator(tmp_dir)
            filenames = [Path(urlparse(uri).path)
                         for uri in local_locator.get_project(dist.metadata.name)['digests'].keys()]

            dist.metadata.write_pkg_file(pkg_info)
            pkg_info.seek(0)
            metadata = Metadata(fileobj=pkg_info)
            yield metadata, filenames
    finally:
        if dist_dir:
            os.chdir(current_wd)
Example #33
 def run(self):
     for path in self.msdeploy_bdists:
         distribution = core.run_setup('setup.py', stop_after='commandline')
         distribution.msdeploy_app_name = get_app_name(path)
         self.delete_installer_cache(distribution)
         self.delete_stamp_files(distribution)
     self.delete_feed_cache()
Example #34
    def _context_from_setuppy(self):
        setuppy_path = os.path.join(self.rootdir, "setup.py")
        if not os.path.exists(setuppy_path):
            raise DebianConfigurationException("Failed to find setup.py")

        dist = run_setup(setuppy_path)
        context = {
            'name': dist.get_name(),
            'version': dist.get_version(),
            'maintainer': dist.get_maintainer(),
            'maintainer_email': dist.get_maintainer_email(),
            'description': dist.get_description(),
        }

        scripts = []
        if dist.entry_points is not None and 'console_scripts' in dist.entry_points:
            scripts += [script.split('=')[0] for script in dist.entry_points['console_scripts']]

        if dist.scripts is not None:
            scripts += [script.rsplit('/', 1)[-1] for script in dist.scripts]

        for name, value in context.items():
            while not value or value == UNKNOWN:
                value = input(
                    "The '{}' parameter is not defined in setup.py. "
                    "Please define it for debian configuration: ".format(name))
                if not value:
                    print("Invalid value. Please try again")

            context[name] = value

        context['scripts'] = scripts

        return context
Example #35
def list_appl_paths(app_name=None, appcmd_exe=None):
    appcmd_exe = get_appcmd_exe(appcmd_exe)
    if appcmd_exe is None:
        return
    cmd = [appcmd_exe, 'list', 'config',
           '/section:system.applicationHost/sites', '/xml']
    logger.info(
        ('Querying appcmd.exe for '
         'sites/site/application/virtualDirectory/@physicalPath:\n{0}'
         ).format(' '.join(cmd)))
    sites_output = subprocess.check_output(cmd)
    sites_dom = minidom.parseString(sites_output)
    cwd = os.getcwd()
    # Work backward through the list, most recent sites are last
    for site in reversed(sites_dom.getElementsByTagName('site')):
        for app in reversed(site.getElementsByTagName('application')):
            for vdir in app.getElementsByTagName('virtualDirectory'):
                path = os.path.expandvars(vdir.getAttribute('physicalPath'))
                if app_name:
                    if not os.path.exists(os.path.join(path, 'setup.py')):
                        continue
                    try:
                        os.chdir(path)
                        dist = core.run_setup(
                            'setup.py', stop_after='commandline')
                    finally:
                        os.chdir(cwd)
                    dist_name = dist.get_name()
                    if app_name != dist_name:
                        # Not an instance of this app
                        continue
                yield path
Example #36
def test_Crypt(config):
    # TODO: disabled for now
    config["useCrypt"] = 0
    return

    # Crypt support. We need to build the AES module and we'll use distutils
    # for that. FIXME: the day we'll use distutils for everything this will be
    # a solved problem.
    logger.info("trying to build crypt support...")
    from distutils.core import run_setup

    cwd = os.getcwd()
    args = sys.argv[:]
    try:
        os.chdir(os.path.join(HOMEPATH, "source", "crypto"))
        dist = run_setup("setup.py", ["install"])
        if dist.have_run.get("install", 0):
            config["useCrypt"] = 1
            logger.info("... crypto support available")
        else:
            config["useCrypt"] = 0
            logger.info("... error building crypto support")
    finally:
        os.chdir(cwd)
        sys.argv = args
Example #37
    def _context_from_setuppy(self):
        setuppy_path = os.path.join(self.rootdir, "setup.py")
        if not os.path.exists(setuppy_path):
            raise DebianConfigurationException("Failed to find setup.py")

        dist = run_setup(setuppy_path)
        context = {
            'name': dist.get_name(),
            'version': dist.get_version(),
            'maintainer': dist.get_maintainer(),
            'maintainer_email': dist.get_maintainer_email(),
            'description': dist.get_description(),
        }

        scripts = []
        if dist.entry_points is not None and 'console_scripts' in dist.entry_points:
            scripts += [script.split('=')[0] for script in dist.entry_points['console_scripts']]

        if dist.scripts is not None:
            scripts += [script.rsplit('/', 1)[-1] for script in dist.scripts]

        for name, value in context.items():
            while not value or value == UNKNOWN:
                value = input(
                    "The '{}' parameter is not defined in setup.py. "
                    "Please define it for debian configuration: ".format(name))
                if not value:
                    print("Invalid value. Please try again")

            context[name] = value

        context['scripts'] = scripts

        return context
Example #38
File: run.py Project: tylerml/pyjen
def _make_package():
    """Creates the redistributable package for the PyJen project"""
    import re
    from distutils.core import run_setup

    modlog.info("creating package...")

    # delete any pre-existing packages
    if os.path.exists("dist"):
        modlog.debug("Cleaning old package - ./dist")
        shutil.rmtree("dist")

    # create new package
    try:
        modlog.debug("Building package using distutils")
        distobj = run_setup("setup.py", ["-q", "bdist_wheel"])
        distobj.run_commands()
    except Exception as err:
        modlog.error("Failed to generate wheel file")
        modlog.error(err)
        exit(1)

    # delete intermediate folders
    modlog.debug("Purging intermediate package folders")
    if os.path.exists("build"):
        shutil.rmtree("build")
    if os.path.exists("pyjen.egg-info"):
        shutil.rmtree("pyjen.egg-info")

    # sanity check: make sure wheel file exists
    if not os.path.exists("dist"):
        modlog.error(
            "Package folder ./dist not found. Package operation must have failed."
        )
        sys.exit(1)

    package_contents = os.listdir("dist")
    if len(package_contents) > 1:
        modlog.warning(
            "Multiple files detected in package folder. Only one .whl file expected."
        )

    wheel_file_found = False
    wheel_file_pattern = r"^pyjen.*-py2.py3-none-any.whl$"
    for obj in package_contents:
        file_path = os.path.join(os.getcwd(), "dist", obj)
        if os.path.isfile(file_path) and re.search(wheel_file_pattern,
                                                   obj) is not None:
            wheel_file_found = True

    if not wheel_file_found:
        modlog.error("Expected output file (.whl) not found in ./dist folder.")
        sys.exit(1)

    # TODO: test package
    # pushd functional_tests > /dev/null
    # ./package_tests.sh

    modlog.info("package created successfully")
Example #39
def get_dist_metadata():
    try:
        dist = run_setup('setup.py', stop_after='init')
    except FileNotFoundError:
        raise FatalError('Error: "setup.py" not found in current directory')

    meta = dist.metadata
    return {'project_name': meta.name, 'current_version': meta.version}
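get_dist_metadata relies on stop_after='init'; other examples here use 'config', 'commandline', or the default 'run'. A brief hedged sketch of the stages, following the distutils.core.run_setup documentation:

from distutils.core import run_setup

# stop_after controls how far setup.py execution proceeds before the
# Distribution object is returned:
#   "init"        - setup() keyword arguments parsed, nothing else
#   "config"      - config files (setup.cfg) parsed as well
#   "commandline" - script_args / sys.argv parsed on top of that
#   "run"         - commands actually executed (the default)
dist = run_setup("setup.py", script_args=["--quiet", "sdist"], stop_after="run")
print(dist.get_name(), dist.get_version())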
Example #40
def _build_setup_requirements() -> Dict[str, List[Requirement]]:
    """Load requirments from setup.py."""
    dist = run_setup("setup.py")
    reqs = {"core": dist.install_requires}  # type: ignore
    reqs.update(dist.extras_require)  # type: ignore
    return {
        extra: list(parse_requirements(reqs))
        for extra, reqs in reqs.items()
    }
Example #41
 def run(self):
     setup = core.run_setup('neoscrypt_module/setup.py',
                            stop_after='commandline')
     if platform.system() == 'Windows':
         setup.command_options['build_ext'] = {
             'compiler': ('build_ext', 'mingw32')
         }
     setup.run_command('install')
     install.run(self)
Example #42
    def get_distribution(self):
        setup_path = self.project_dir / "setup.py"
        if not os.path.exists(setup_path):
            return None

        try:
            distribution = run_setup(setup_path, stop_after="init")
        except Exception, e:
            print "Warning: failed to load distribution information from setup.py. Error was: %s" % e
            return None
Example #43
def finalize(args):
    distclean()
    try:
        check_pypirc()
        repository = Repository(REPO_ROOT, args.repo)
        img_manager = ImageManager(args.release)
        pr_data = repository.find_release_pr(args.release)
        if not pr_data:
            raise ScriptError('No PR found for {}'.format(args.release))
        if not check_pr_mergeable(pr_data):
            raise ScriptError('Can not finalize release with an unmergeable PR')
        if not img_manager.check_images():
            raise ScriptError('Missing release image')
        br_name = branch_name(args.release)
        if not repository.branch_exists(br_name):
            raise ScriptError('No local branch exists for this release.')
        gh_release = repository.find_release(args.release)
        if not gh_release:
            raise ScriptError('No Github release draft for this version')

        repository.checkout_branch(br_name)

        pypandoc.convert_file(
            os.path.join(REPO_ROOT, 'README.md'), 'rst', outputfile=os.path.join(REPO_ROOT, 'README.rst')
        )
        run_setup(os.path.join(REPO_ROOT, 'setup.py'), script_args=['sdist', 'bdist_wheel'])

        merge_status = pr_data.merge()
        if not merge_status.merged and not args.finalize_resume:
            raise ScriptError(
                'Unable to merge PR #{}: {}'.format(pr_data.number, merge_status.message)
            )

        pypi_upload(args)

        img_manager.push_images()
        repository.publish_release(gh_release)
    except ScriptError as e:
        print(e)
        return 1

    return 0
Example #44
    def write_feed(self, dist_file, **kw):
        logger.info('Writing Web Platform Installer feed to {0}'.format(
            dist_file))

        view = core.run_setup('setup.py', stop_after='commandline')
        view.context = self
        view.dists = self.distributions
        view.now = datetime.datetime.now()

        open(dist_file, 'w').write(self.template(view=view, **kw))
        return dist_file
Example #45
File: pypi.py Project: seveas/golem
    def process_job_simple(self, job):
        self.logger.info("Uploading package to PyPI")
        files = job.fetch_artefacts(job.requires[0], job.tarball)
        files = [('sdist', 'source', files[0])]

        job.shell.git('checkout', job.sha1, '--', 'setup.py')
        Command.announce = self._log
        dist = run_setup('setup.py')
        dist.run_command('register')
        dist.dist_files = files
        dist.run_command('upload')
Example #46
def _make_package():
    """Creates the redistributable package for the PyJen project"""
    import re
    from distutils.core import run_setup

    modlog.info("creating package...")

    # delete any pre-existing packages
    if os.path.exists("dist"):
        modlog.debug("Cleaning old package - ./dist")
        shutil.rmtree("dist")

    # create new package
    try:
        modlog.debug("Building package using distutils")
        distobj = run_setup("setup.py", ["-q", "bdist_wheel"])
        distobj.run_commands()
    except Exception as err:
        modlog.error("Failed to generate wheel file")
        modlog.error(err)
        exit(1)

    # delete intermediate folders
    modlog.debug("Purging intermediate package folders")
    if os.path.exists("build"):
        shutil.rmtree("build")
    if os.path.exists("pyjen.egg-info"):
        shutil.rmtree("pyjen.egg-info")

    # sanity check: make sure wheel file exists
    if not os.path.exists("dist"):
        modlog.error("Package folder ./dist not found. Package operation must have failed.")
        sys.exit(1)

    package_contents = os.listdir("dist")
    if len(package_contents) > 1:
        modlog.warning("Multiple files detected in package folder. Only one .whl file expected.")

    wheel_file_found = False
    wheel_file_pattern = r"^pyjen.*-py2.py3-none-any.whl$"
    for obj in package_contents:
        file_path = os.path.join(os.getcwd(), "dist", obj)
        if os.path.isfile(file_path) and re.search(wheel_file_pattern, obj) is not None:
            wheel_file_found = True

    if not wheel_file_found:
        modlog.error("Expected output file (.whl) not found in ./dist folder.")
        sys.exit(1)

    # TODO: test package
    # pushd functional_tests > /dev/null
    # ./package_tests.sh

    modlog.info("package created successfully")
Example #47
def install_neuron(get_latest=False):
    nrnpath = os.path.join(os.environ['HOME'],'neuron')
    os.mkdir(nrnpath)
    with working_dir(nrnpath):
        if get_latest:
            print co(['hg', 'clone', 'http://www.neuron.yale.edu/hg/neuron/nrn'])
            os.chdir('nrn')
            print co(['./build.sh'])
        else:
            print co(['wget', 'http://www.neuron.yale.edu/ftp/neuron/versions/v7.3/nrn-7.3.tar.gz'])
            print co(['tar', 'xzvf', 'nrn-7.3.tar.gz'])
            print co(['mv', 'nrn-7.3', 'nrn'])
            os.chdir('nrn')
            
        path = os.getcwd()
        pyexec = sys.executable
        co(["./configure --prefix=%s --without-iv --with-nrnpython=%s"%(path,pyexec)], shell=True)
        print co(['make'])
        print co(['make', 'install'])

        os.chdir('src/nrnpython')
        run_setup('./setup.py', ['install'])
Example #48
    def get_name(self):
        loc = self.path or self.uri
        if loc:
            self._uri_scheme = "path" if self.path else "uri"
        name = None
        if self.link and self.link.egg_fragment:
            return self.link.egg_fragment
        elif self.link and self.link.is_wheel:
            return Wheel(self.link.filename).name
        if (
            self._uri_scheme != "uri"
            and self.path
            and self.setup_path
            and self.setup_path.exists()
        ):
            from distutils.core import run_setup

            old_curdir = os.path.abspath(os.getcwd())
            try:
                os.chdir(str(self.setup_path.parent))
                dist = run_setup(self.setup_path.as_posix(), stop_after="init")
                name = dist.get_name()
            except (FileNotFoundError, IOError) as e:
                dist = None
            except Exception as e:
                from .._compat import InstallRequirement, make_abstract_dist

                try:
                    if not isinstance(self.path, Path):
                        _path = Path(self.path)
                    else:
                        _path = self.path
                    if self.editable:
                        _ireq = InstallRequirement.from_editable(_path.as_uri())
                    else:
                        _ireq = InstallRequirement.from_line(_path.as_posix())
                    dist = make_abstract_dist(_ireq).get_dist()
                    name = dist.project_name
                except (TypeError, ValueError, AttributeError) as e:
                    dist = None
            finally:
                os.chdir(old_curdir)
        hashed_loc = hashlib.sha256(loc.encode("utf-8")).hexdigest()
        hashed_name = hashed_loc[-7:]
        if not name or name == "UNKNOWN":
            self._has_hashed_name = True
            name = hashed_name
        if self.link and not self._has_hashed_name:
            self.link = Link("{0}#egg={1}".format(self.link.url, name))
        return name
Example #49
def main (script, args):
    if len(args) != 1:
        raise SystemExit, (USAGE % vars()) + "\nWrong number of arguments"

    setup_script = args[0]
    dist = run_setup(setup_script, script_args=[], stop_after="init")
    print """\
%s is the setup script for %s; description:
%s

contact:  %s <%s>
info url: %s
licence:  %s
""" % (setup_script, dist.get_fullname(), dist.get_description(),
       dist.get_contact(), dist.get_contact_email(),
       dist.get_url(), dist.get_licence())
Example #50
    def __call__(self, setup_args=setup_args):
        appl_physical_path = self.get_appl_physical_path()
        if 'APPL_PHYSICAL_PATH' not in os.environ:
            os.environ['APPL_PHYSICAL_PATH'] = str(appl_physical_path)

        stamp_path = os.path.join(appl_physical_path, self.stamp_filename)
        if os.path.exists(stamp_path):
            # clean up the stamp file regardless, we tried
            os.remove(stamp_path)
        elif self.require_stamp:
            raise ValueError(
                'No IIS install stamp file found at {0}'.format(stamp_path))

        cwd = os.getcwd()
        try:
            os.chdir(appl_physical_path)
            if self.virtualenv:
                bootstrap = None
                if self.virtualenv is not True:
                    bootstrap = self.virtualenv
                executable = self.setup_virtualenv(bootstrap=bootstrap)

                # Install iiswsgi for the setup commands
                cmd = [
                    os.path.abspath(
                        os.path.join(
                            sysconfig.get_path(
                                'scripts', vars=dict(base=os.curdir)),
                            'easy_install' + sysconfig.get_config_var('EXE'))),
                    '--find-links',
                    distutils.sysconfig.get_python_lib(), 'PasteScript',
                    'iiswsgi'
                ]
                self.logger.info(
                    'Installing iiswsgi into virtualenv:\n{0}'.format(
                        ' '.join(cmd)))
                subprocess.check_call(cmd)

                cmd = [executable, 'setup.py'] + setup_args
                self.logger.info('Installing application:\n{0}'.format(
                    ' '.join(cmd)))
                return subprocess.check_call(cmd)
            self.logger.info('Installing application: setup.py {0}'.format(
                ' '.join(setup_args)))
            return core.run_setup('setup.py', script_args=setup_args)
        finally:
            os.chdir(cwd)
Example #51
def _publish():
    """Publishes a PyJen release to PyPi"""
    modlog.info("publishing release...")

    # TODO: lay tag on release

    from distutils.core import run_setup
    try:
        # NOTE: default sphinx parameters are auto-loaded from the setup.cfg file
        distobj = run_setup("setup.py", ["bdist_wheel", "upload"])
        distobj.run_commands()
    except Exception as err:
        modlog.error("Failed to upload new package to PyPI.")
        modlog.error(err)
        exit(1)

    _update_pyjen_version()
    # TODO: Commit change to init.py
    modlog.info("release published successfully")
Example #52
    def compile(self):
        import pycuda.autoinit
        assert self.code!=None, "Generate the code first"

        if self.gpu:

            from pycuda.compiler import SourceModule

            # mod = SourceModule(self.code)
            mod = SourceModule(self.code,no_extern_c=True)
            self.odeRK4_gpu = mod.get_function("odeRK4")

        else:            

            import tempfile, os

            fh = tempfile.NamedTemporaryFile(mode='w',suffix='.cpp')
            fh.write(self.code)
            fh.seek(0)
            
            os.environ["CC"]="g++"

            setup_script = \
"""from distutils.core import setup, Extension
module1 = Extension('odeRK4', sources = ["%(filename)s"], libraries = ['m'], extra_compile_args = ['-std=c++11'] )
setup(name = 'odeRK4',version = '1.0', ext_modules = [module1])""" % {"filename":fh.name}
            
            fh2 = tempfile.NamedTemporaryFile(mode='w',suffix='.py')
            fh2.write(setup_script)
            fh2.seek(0)
            
            from distutils.core import run_setup
            
            self.dist = run_setup(fh2.name)
            self.dist.run_command("build")
            
            import ctypes
            self.odeRK4 = ctypes.cdll.LoadLibrary("./build/lib.linux-x86_64-2.7/odeRK4.so").odeRK4        
Example #53
    def add_msdeploy(self, path, *args):
        cwd = os.getcwd()
        try:
            os.chdir(path)
            distribution = self.distribution
            if os.path.abspath(path) != os.path.abspath(cwd):
                distribution = core.run_setup(
                    'setup.py', stop_after='commandline')

            distribution.build = distribution.get_command_obj('build')
            distribution.build.ensure_finalized()
            distribution.has_msdeploy_manifest = (
                'build_msdeploy' in distribution.build.get_sub_commands())
            if not distribution.has_msdeploy_manifest:
                raise errors.DistutilsFileError(
                    'No Web Deploy manifest found for {0}'.format(path))

            distribution.msdeploy_file = options.get_egg_name(
                distribution) + '.msdeploy.zip'
            distribution.msdeploy_package = os.path.abspath(
                os.path.join('dist', distribution.msdeploy_file))
            distribution.msdeploy_package_url = urlparse.urlunsplit((
                'file', '', urllib.pathname2url(distribution.msdeploy_package),
                '', ''))

            webpi_size = os.path.getsize(distribution.msdeploy_package)
            cmd = ['fciv', '-sha1', distribution.msdeploy_package]
            webpi_sha1 = ''
            try:
                webpi_sha1_output = subprocess.check_output(cmd)
            except OSError, error:
                if error.errno == errno.ENOENT:
                    logger.exception('Error getting SHA1:\n{0}'.format(
                        ' '.join(cmd)))
                else:
                    raise
            else:
Example #54
def set_env(settings=None, setup_dir=""):
    """
    Used in management commands or at the module level of a fabfile to
    integrate woven project django.conf settings into fabric, and set the local current
    working directory to the distribution root (where setup.py lives).
    
    ``settings`` is your django settings module to pass in
    if you want to call this from a fabric script.
    
    ``setup_dir`` is an optional path to the directory containing setup.py
    This would be used in instances where setup.py was not above the cwd
    
    This function is used to set the environment for all hosts
   
    """

    # switch the working directory to the distribution root where setup.py is
    original_fabfile = env.fabfile
    env.fabfile = "setup.py"
    if setup_dir:
        fabfile_path = os.path.join(setup_dir, "setup.py")
    else:
        fabfile_path = find_fabfile()
    if not fabfile_path:
        print "Error: You must create a setup.py file in your distribution"
        sys.exit(1)

    local_working_dir = os.path.split(fabfile_path)[0]
    env.fabfile = original_fabfile
    os.chdir(local_working_dir)

    setup = run_setup("setup.py", stop_after="init")

    if setup.get_name() == "UNKNOWN" or setup.get_version() == "0.0.0" or not setup.packages:
        print "ERROR: You must define a minimum of name, version and packages in your setup.py"
        sys.exit(1)

    # project env variables for deployment
    env.project_name = setup.get_name()  # project_name()
    env.project_full_version = setup.get_version()  # local('python setup.py --version').rstrip()
    env.project_version = _parse_project_version(env.project_full_version)
    env.project_fullname = "-".join([env.project_name, env.project_version])
    env.project_package_name = setup.packages[0]
    env.patch = False

    # django settings are passed in by the command
    # We'll assume that if the settings aren't passed in we're running from a fabfile
    if not settings:
        sys.path.insert(0, local_working_dir)

        # import global settings
        project_settings = import_module(env.project_name + ".settings")
    else:

        project_settings = settings
    # If sqlite is used we can manage the database on first deployment
    env.DEFAULT_DATABASE_ENGINE = project_settings.DATABASES["default"]["ENGINE"]
    env.DEFAULT_DATABASE_NAME = project_settings.DATABASES["default"]["NAME"]

    # overwrite with main sitesettings module
    # just for MEDIA_URL, ADMIN_MEDIA_PREFIX, and STATIC_URL
    # if this settings file exists
    try:
        site_settings = import_module(".".join([env.project_name, "sitesettings.settings"]))
        project_settings.MEDIA_URL = site_settings.MEDIA_URL
        project_settings.ADMIN_MEDIA_PREFIX = site_settings.ADMIN_MEDIA_PREFIX
        project_settings.DATABASES = site_settings.DATABASES
        if hasattr(site_settings, "STATIC_URL"):
            project_settings.STATIC_URL = site_settings.STATIC_URL
        else:
            project_settings.STATIC_URL = project_settings.ADMIN_MEDIA_PREFIX
    except ImportError:
        pass

    # update woven_env from project_settings
    local_settings = dir(project_settings)
    # only get settings that woven uses
    for setting in local_settings:
        if setting.isupper() and hasattr(woven_env, setting):
            s = getattr(project_settings, setting, "")
            woven_env[setting] = s

    # update the fabric env with all the woven settings
    env.update(woven_env)

    # set any user/password defaults if they are not supplied
    # Fabric would get the user from the options by default as the system user
    # We will overwrite that
    if woven_env.HOST_USER:
        env.user = woven_env.HOST_USER
    env.password = woven_env.HOST_PASSWORD

    # set the hosts if they aren't already
    if not env.hosts:
        env.hosts = woven_env.HOSTS
    if not env.roledefs:
        env.roledefs = woven_env.ROLEDEFS

    # reverse_lookup hosts to roles
    role_lookup = {}
    for role in env.roles:
        r_hosts = env.roledefs[role]
        for host in r_hosts:
            # since port is not handled by fabric.main.normalize we'll do it ourselves
            role_lookup["%s:%s" % (host, str(woven_env.HOST_SSH_PORT))] = role
    # now add any hosts that aren't already defined in roles
    for host in env.hosts:
        host_string = "%s:%s" % (host, str(woven_env.HOST_SSH_PORT))
        if host_string not in role_lookup.keys():
            role_lookup[host_string] = ""
    env.role_lookup = role_lookup
    env.hosts = role_lookup.keys()

    # remove any unneeded db adaptors - except sqlite
    remove_backends = ["postgresql_psycopg2", "mysql"]
    for db in project_settings.DATABASES:
        engine = project_settings.DATABASES[db]["ENGINE"].split(".")[-1]
        if engine in remove_backends:
            remove_backends.remove(engine)
    for backend in remove_backends:
        if backend == "postgresql_psycopg2":
            rm = "python-psycopg2"
        elif backend == "mysql":
            rm = "python-mysqldb"
        env.HOST_BASE_PACKAGES.remove(rm)

    # packages can be just the base + extra packages
    # or role dependent we need to just map out the packages to hosts and roles here
    packages = {}
    all_packages = set([])
    for role in env.roles:
        packages[role] = env.ROLE_PACKAGES.get(role, [])
        if not packages[role]:
            packages[role] = env.HOST_BASE_PACKAGES + env.HOST_EXTRA_PACKAGES
        all_packages = set(packages[role]) | all_packages

    # no role
    packages[""] = env.HOST_BASE_PACKAGES + env.HOST_EXTRA_PACKAGES
    all_packages = set(packages[""]) | all_packages

    # conveniently add gunicorn ppa
    if "gunicorn" in all_packages:
        if "ppa:bchesneau/gunicorn" not in env.LINUX_PACKAGE_REPOSITORIES:
            env.LINUX_PACKAGE_REPOSITORIES.append("ppa:bchesneau/gunicorn")

    env.packages = packages

    # sanity check for unwanted combinations in the empty role
    u = set(packages[""])
    wsgi = u & set(["gunicorn", "uwsgi"])
    if wsgi and "apache2" in u:
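        # a standalone WSGI server (gunicorn/uwsgi) supersedes apache2 + mod-wsgi for this role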
        u = u - set(["apache2", "libapache2-mod-wsgi"])
    env.packages[""] = list(u)

    # per host
    env.installed_packages = {}
    env.uninstalled_packages = {}

    # UFW firewall rules
    firewall_rules = {}
    for role in env.roles:
        firewall_rules[role] = env.ROLE_UFW_RULES.get(role, [])
    firewall_rules[""] = env.UFW_RULES
    env.firewall_rules = firewall_rules

    # Now update the env with any settings that are not defined by woven but may
    # be used by woven or fabric
    env.MEDIA_ROOT = project_settings.MEDIA_ROOT
    env.MEDIA_URL = project_settings.MEDIA_URL
    env.ADMIN_MEDIA_PREFIX = project_settings.ADMIN_MEDIA_PREFIX
    if not env.STATIC_URL:
        env.STATIC_URL = project_settings.ADMIN_MEDIA_PREFIX
    env.TEMPLATE_DIRS = project_settings.TEMPLATE_DIRS

    # Set the server /etc/timezone
    env.TIME_ZONE = project_settings.TIME_ZONE
    # Used to detect certain apps, e.g. South, static_builder
    env.INSTALLED_APPS = project_settings.INSTALLED_APPS

    # SSH key
    if env.SSH_KEY_FILENAME:
        env.KEY_FILENAME = env.SSH_KEY_FILENAME
    else:
        env.KEY_FILENAME = ""

    # noinput
    if not hasattr(env, "INTERACTIVE"):
        env.INTERACTIVE = True
    if not hasattr(env, "verbosity"):
        env.verbosity = 1

    # overwrite existing settings
    if not hasattr(env, "overwrite"):
        env.overwrite = False

    # South integration defaults
    env.nomigration = False
    env.manualmigration = False
    env.migration = ""

    env.root_disabled = False

    # Sites
    env.sites = {}
    env.shell = "/bin/bash --noprofile -l -c"
Ejemplo n.º 55
0
# Education agencies that are members of the Smarter Balanced Assessment
# Consortium as of August 1, 2014 are granted a worldwide, non-exclusive, fully
# paid-up, royalty-free, perpetual license, to access, use, execute, reproduce,
# display, distribute, perform and create derivative works of the software
# included in the Reporting Platform, including the source code to such software.
# This license includes the right to grant sublicenses by such consortium members
# to third party vendors solely for the purpose of performing services on behalf
# of such consortium member educational agencies.

import os

from setuptools import setup, find_packages
import shutil
from distutils.core import run_setup

here = os.path.abspath(os.path.dirname(__file__))

dependencies = [
    'edworker',
    'edschema',
    'edcore',
    'hpz_client']


for dependency in dependencies:
    pkg_path = os.path.abspath(here + "/../" + dependency + "/")
    os.chdir(pkg_path)
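    # with script_args omitted, run_setup reuses this script's own sys.argv,
    # so each dependency is built with the same command-line arguments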
    run_setup("setup.py")
    os.chdir(here)
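# finally, run this package's own setup.py with the same command-line arguments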
run_setup("setup.py")
Ejemplo n.º 56
0
        no_standalone = False

    import os
    with open(infilename) as f:
        asn1def = f.read()

    parse_tree = parser.parse_asn1(asn1def)
    modules = build_semantic_model(parse_tree)
    assert (len(modules) == 1)

    module_name = outfilename
    path = "."
    args = FakeArgs()

    # pull in the python-specific node implementations
    walker = Walker(modules[0], nodes)
    walker.walk()

    backend = CBackend(args, walker, module_name, path)
    backend.generate_code()


generate ('t0.asn', 't0')

# 2) build the cython extension in place.
from distutils.core import run_setup
run_setup ('setup.py', ['build_ext', '--inplace'])
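# '--inplace' drops the compiled extension next to setup.py, so the test below can import it without installing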

# 3) run the test
execfile ("t0_c_test.py")
Ejemplo n.º 57
0
def _make_docs():
    """Generates the online documentation for the project"""
    modlog.info("Generating API documentation...")

    sphinx_version = _get_package_version("Sphinx")
    if sphinx_version < (1, 2, 3):
        modlog.error("Unsupported Sphinx version detected: {0}".format(sphinx_version))
        modlog.error("Please run the --prep_env operation to properly configure your environment.")
        exit(1)

    source_dir = os.path.join(os.getcwd(), "docs")
    log_file = os.path.join(os.getcwd(), "logs", "sphinx.log")

    standard_output = StringIO()
    error_output = StringIO()

    # First we make sure the API docs are up to date
    try:
        from sphinx import apidoc
        with redirect_stdout(standard_output):
            with redirect_stderr(error_output):
                modlog.debug("Calling Sphinx to build API docs")
                # NOTE: The first parameter to main is assumed to be the name of the executable that called
                #       main, which in our case doesn't exist. So we give it an empty value
                return_code = apidoc.main(["", "--force", "--separate", "-o", source_dir, "pyjen"])

        if return_code is not None and return_code != 0:
            modlog.error("Failed to generate API docs ({0}).".format(return_code))
            exit(1)
    except Exception as err:
        modlog.error("Failed to generate API docs.")
        modlog.error(str(err))
        exit(1)

    # TODO: Do a git stat and, if any files have been modified, throw a warning to the user

    # Purge any previous build artifacts
    build_dir = os.path.join(os.getcwd(), "build", "sphinx")
    if os.path.exists(build_dir):
        modlog.debug("Purging Sphinx output folder")
        shutil.rmtree(build_dir)

    modlog.debug("Creating empty build folder")
    os.makedirs(build_dir)

    # Generate the full online documentation in HTML format
    from distutils.core import run_setup
    try:
        with redirect_stdout(standard_output):
            with redirect_stderr(error_output):
                modlog.debug("Calling setuptools to generate online docs")
                # NOTE: default sphinx parameters are auto-loaded from the setup.cfg file
                distobj = run_setup("setup.py", ["build_sphinx", "-q"])
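                # run_setup's default stop_after="run" already executes the command; this call re-runs it explicitly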
                distobj.run_commands()
    except Exception as err:
        modlog.error("Failed to generate online documentation")
        modlog.error(err)
        exit(1)

    modlog.debug("Writing logs to disk")
    tmp_error_out = error_output.getvalue()
    if len(tmp_error_out) > 0:
        modlog.warning("Sphinx warnings detected. Check log file for details " + os.path.relpath(log_file, os.getcwd()))
    with open(log_file, mode='w') as fh:
        fh.write(standard_output.getvalue())
        fh.write(tmp_error_out)

    standard_output.close()
    error_output.close()
    # TODO: Optionally open the index.html for the online docs in default browser
    modlog.info("Documentation complete")
Ejemplo n.º 58
0
import os
import datetime

from distutils.core import run_setup

import brian
from update_release_date import setreleasedate

pathname = os.path.abspath(os.path.dirname(__file__))

setreleasedate()

os.chdir(pathname)

os.chdir('../../../.') # work from Brian's root

if os.path.exists('MANIFEST'):
    os.remove('MANIFEST')

run_setup('setup.py', ['bdist_wininst', '--plat-name=win32'])  # to get the same file name on Linux and Windows
run_setup('setup.py', ['sdist', '--formats=gztar,zip'])
os.chdir('dist')
bname = 'brian-' + brian.__version__
bdate = str(datetime.date.today())
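# The loop below renames each built artifact so its filename includes today's date, removing any stale copy first.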
    
for ext in ['tar.gz', 'zip', 'win32.exe']:
    fname = bname + '-' + bdate + '.' + ext
    if os.path.exists(fname):
        print('Deleting "%s"' % fname)
        os.remove(fname)

    oldfname = '%s.%s' % (bname, ext)
    print('Renaming "%s" to "%s"' % (oldfname, fname))
    os.rename(oldfname, fname)
Ejemplo n.º 59
0
def cli(verbose, dry_run, pre, rebuild, find_links, index_url, extra_index_url,
        cert, client_cert, trusted_host, header, index, emit_trusted_host, annotate,
        upgrade, upgrade_packages, output_file, allow_unsafe, generate_hashes,
        src_files, max_rounds):
    """Compiles requirements.txt from requirements.in specs."""
    log.verbose = verbose

    if len(src_files) == 0:
        if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
            src_files = (DEFAULT_REQUIREMENTS_FILE,)
        elif os.path.exists('setup.py'):
            src_files = ('setup.py',)
            if not output_file:
                output_file = 'requirements.txt'
        else:
            raise click.BadParameter(("If you do not specify an input file, "
                                      "the default is {} or setup.py").format(DEFAULT_REQUIREMENTS_FILE))

    if len(src_files) == 1 and src_files[0] == '-':
        if not output_file:
            raise click.BadParameter('--output-file is required if input is from stdin')

    if len(src_files) > 1 and not output_file:
        raise click.BadParameter('--output-file is required if two or more input files are given.')

    if output_file:
        dst_file = output_file
    else:
        base_name = src_files[0].rsplit('.', 1)[0]
        dst_file = base_name + '.txt'

    if upgrade and upgrade_packages:
        raise click.BadParameter('Only one of --upgrade or --upgrade-package can be provided as an argument.')

    ###
    # Setup
    ###

    pip_command = get_pip_command()

    pip_args = []
    if find_links:
        for link in find_links:
            pip_args.extend(['-f', link])
    if index_url:
        pip_args.extend(['-i', index_url])
    if extra_index_url:
        for extra_index in extra_index_url:
            pip_args.extend(['--extra-index-url', extra_index])
    if cert:
        pip_args.extend(['--cert', cert])
    if client_cert:
        pip_args.extend(['--client-cert', client_cert])
    if pre:
        pip_args.extend(['--pre'])
    if trusted_host:
        for host in trusted_host:
            pip_args.extend(['--trusted-host', host])

    pip_options, _ = pip_command.parse_args(pip_args)

    session = pip_command._build_session(pip_options)
    repository = PyPIRepository(pip_options, session)

    # Proxy with a LocalRequirementsRepository if --upgrade is not specified
    # (= default invocation)
    if not upgrade and os.path.exists(dst_file):
        ireqs = parse_requirements(dst_file, finder=repository.finder, session=repository.session, options=pip_options)
        # Exclude packages given via --upgrade-package/-P from the existing pins: we want those upgraded.
        upgrade_pkgs_key = {key_from_req(InstallRequirement.from_line(pkg).req) for pkg in upgrade_packages}
        existing_pins = {key_from_req(ireq.req): ireq
                         for ireq in ireqs
                         if is_pinned_requirement(ireq) and key_from_req(ireq.req) not in upgrade_pkgs_key}
        repository = LocalRequirementsRepository(existing_pins, repository)

    log.debug('Using indexes:')
    # remove duplicate index urls before processing
    repository.finder.index_urls = list(dedup(repository.finder.index_urls))
    for index_url in repository.finder.index_urls:
        log.debug('  {}'.format(index_url))

    if repository.finder.find_links:
        log.debug('')
        log.debug('Configuration:')
        for find_link in repository.finder.find_links:
            log.debug('  -f {}'.format(find_link))

    ###
    # Parsing/collecting initial requirements
    ###

    constraints = []
    for src_file in src_files:
        is_setup_file = os.path.basename(src_file) == 'setup.py'
        if is_setup_file or src_file == '-':
            # pip requires filenames and not files. Since we want to support
            # piping from stdin, we need to briefly save the input from stdin
            # to a temporary file and have pip read that. This is also used
            # for reading requirements from install_requires in setup.py.
            tmpfile = tempfile.NamedTemporaryFile(mode='wt', delete=False)
            if is_setup_file:
                from distutils.core import run_setup
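                # run_setup returns the Distribution object; for a setuptools-based
                # setup.py its install_requires attribute lists the declared dependencies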
                dist = run_setup(src_file)
                tmpfile.write('\n'.join(dist.install_requires))
            else:
                tmpfile.write(sys.stdin.read())
            tmpfile.flush()
            constraints.extend(parse_requirements(
                tmpfile.name, finder=repository.finder, session=repository.session, options=pip_options))
        else:
            constraints.extend(parse_requirements(
                src_file, finder=repository.finder, session=repository.session, options=pip_options))

    # Filter out pip environment markers which do not match (PEP496)
    constraints = [req for req in constraints
                   if req.markers is None or req.markers.evaluate()]

    # Check the given base set of constraints first
    Resolver.check_constraints(constraints)

    try:
        resolver = Resolver(constraints, repository, prereleases=pre,
                            clear_caches=rebuild, allow_unsafe=allow_unsafe)
        results = resolver.resolve(max_rounds=max_rounds)
        if generate_hashes:
            hashes = resolver.resolve_hashes(results)
        else:
            hashes = None
    except PipToolsError as e:
        log.error(str(e))
        sys.exit(2)

    log.debug('')

    ##
    # Output
    ##

    # Compute reverse dependency annotations statically, from the
    # dependency cache that the resolver has populated by now.
    #
    # TODO (1a): reverse deps for any editable package are lost
    #            what SHOULD happen is that they are cached in memory, just
    #            not persisted to disk!
    #
    # TODO (1b): perhaps it's easiest if the dependency cache has an API
    #            that could take InstallRequirements directly, like:
    #
    #                cache.set(ireq, ...)
    #
    #            then, when ireq is editable, it would store in
    #
    #              editables[egg_name][link_without_fragment] = deps
    #              editables['pip-tools']['git+...ols.git@future'] = {'click>=3.0', 'six'}
    #
    #            otherwise:
    #
    #              self[as_name_version_tuple(ireq)] = {'click>=3.0', 'six'}
    #
    reverse_dependencies = None
    if annotate:
        reverse_dependencies = resolver.reverse_dependencies(results)

    writer = OutputWriter(src_files, dst_file, dry_run=dry_run,
                          emit_header=header, emit_index=index,
                          emit_trusted_host=emit_trusted_host,
                          annotate=annotate,
                          generate_hashes=generate_hashes,
                          default_index_url=repository.DEFAULT_INDEX_URL,
                          index_urls=repository.finder.index_urls,
                          trusted_hosts=pip_options.trusted_hosts,
                          format_control=repository.finder.format_control)
    writer.write(results=results,
                 unsafe_requirements=resolver.unsafe_constraints,
                 reverse_dependencies=reverse_dependencies,
                 primary_packages={key_from_req(ireq.req) for ireq in constraints if not ireq.constraint},
                 markers={key_from_req(ireq.req): ireq.markers
                          for ireq in constraints if ireq.markers},
                 hashes=hashes,
                 allow_unsafe=allow_unsafe)

    if dry_run:
        log.warning('Dry-run, so nothing updated.')
Ejemplo n.º 60
0
'''
NOTE: you need a .pypirc file to do this; you may need to set the
HOME environment variable to the directory where it is saved. Any
spaces in the HOME path will cause this to fail, so use the old-style
8.3 equivalent name.

Also note that MANIFEST.in may not be handled correctly with this.
'''
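# For reference, a minimal ~/.pypirc sketch (placeholder values, not taken
# from this project) of the kind the register/upload commands below expect:
#
#     [distutils]
#     index-servers =
#         pypi
#
#     [pypi]
#     username: <your-username>
#     password: <your-password>
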
import os
from distutils.core import run_setup

pathname = os.path.abspath(os.path.dirname(__file__))
os.chdir(pathname)
os.chdir('../../../.') # work from Brian's root
run_setup('setup.py', ['register'])
run_setup('setup.py', ['sdist', 'bdist_wininst', 'upload'])