def run(self):
    """Discover the test modules under tests/ (or use the single module
    requested via ``testmodule``), run them against a freshly started
    test server, and exit with the combined failure + error count."""
    from tests.server import TestServer
    from tests.test_core import WebserviceTest

    log.set_verbosity(self.verbose)
    if self.verbose >= 2:
        self.announce('Setting log level to DEBUG ({0})'.format(logging.DEBUG), level=2)
        logging.basicConfig(level=logging.DEBUG)

    if self.testmodule is None:
        # Build dotted "tests.<module>" names for every matching test file.
        testfiles = [
            '.'.join(['tests', splitext(basename(path))[0]])
            for path in glob(pjoin(self._dir, 'tests', self.test_prefix + '*.py'))
            if not path.endswith('__init__.py')
        ]
    else:
        testfiles = [self.testmodule]

    server = TestServer(daemonise=True, silent=(self.verbose < 3))
    server.start()
    WebserviceTest.TEST_PORT = server.port
    self.announce("Waiting for test server to start on port " + str(server.port), level=2)
    time.sleep(1)  # give the daemonised server a moment to bind its port

    self.announce("Test files:" + str(testfiles), level=2)
    suite = TestLoader().loadTestsFromNames(testfiles)
    runner = TextTestRunner(verbosity=self.verbose)
    outcome = runner.run(suite)
    # Exit status is the total number of problems, 0 meaning success.
    exit(len(outcome.failures) + len(outcome.errors))
def run(self):
    """Locate every test module under tests/, run the suite against a
    freshly started test server, then exit."""
    from tests.testserver import TestServer
    from tests.test import WebserviceTest

    log.set_verbosity(self.verbose)

    server = TestServer()
    server.start()
    WebserviceTest.TEST_PORT = server.port
    self.announce("Waiting for test server to start on port " + str(server.port), level=2)
    time.sleep(1)  # allow the server thread time to come up

    # Dotted module names ("tests.<name>") for each matching test file.
    testfiles = [
        '.'.join(['tests', splitext(basename(path))[0]])
        for path in glob(pjoin(self._dir, 'tests', self.test_prefix + '*.py'))
        if not path.endswith('__init__.py')
    ]
    self.announce("Test files:" + str(testfiles), level=2)

    suite = TestLoader().loadTestsFromNames(testfiles)
    runner = TextTestRunner(verbosity=self.verbose)
    runner.run(suite)
    exit()
def run(self): parser = self.arg_parser # parse options --file and --verbose self.options, self.argv = parser.parse_known_args(self.argv) log.set_verbosity(self.options.verbose) parser.add_argument( '-h', '--help', action='help', default=argparse.SUPPRESS, help=_('show this help message and exit')) self.subparser = parser.add_subparsers(help="taget help", dest="target") try: # load native context with PakefileContext(self): file = self.options.file if file is None: if os.path.isfile(PAKEFILE_NAME): file = PAKEFILE_NAME else: self.load() return # load root context with PakefileContext(self, file): self.load() except PakeError, e: log.error("Error: %s" % e.message)
def setup_python3():
    """Copy the sources listed in MANIFEST.in to build/src and run 2to3
    on the copied ``.py`` files, returning the staging directory.

    Taken from "distribute" setup.py.  Also prepends the staging
    directory to ``sys.path`` so the subsequent setup() uses the
    converted copy.

    :returns: path of the staging directory (``build/src``).
    """
    from distutils.filelist import FileList
    from distutils import dir_util, file_util, util, log
    from os.path import join

    tmp_src = join("build", "src")
    log.set_verbosity(1)

    # Collect the file list from the MANIFEST.in template.
    fl = FileList()
    # Fix: the original left the file handle open; use a context manager.
    with open("MANIFEST.in") as manifest:
        for line in manifest:
            if not line.strip():
                continue
            fl.process_template_line(line)

    dir_util.create_tree(tmp_src, fl.files)

    # Copy each file into the staging tree; only freshly copied .py
    # files need to be run through 2to3.
    outfiles_2to3 = []
    for f in fl.files:
        outf, copied = file_util.copy_file(f, join(tmp_src, f), update=1)
        if copied and outf.endswith(".py"):
            outfiles_2to3.append(outf)
    util.run_2to3(outfiles_2to3)

    # arrange setup to use the copy
    sys.path.insert(0, tmp_src)
    return tmp_src
def run(self):
    """Install every requested spec via easy_install, temporarily using
    this command's own log verbosity and restoring the distribution's
    verbosity afterwards -- even if an install fails."""
    if self.verbose != self.distribution.verbose:
        # Only touch the global log level when it actually differs.
        log.set_verbosity(self.verbose)
    try:
        for requirement in self.args:
            self.easy_install(requirement)
    finally:
        # Always restore the distribution-wide verbosity.
        log.set_verbosity(self.distribution.verbose)
def parse_command_line(self):
    """Parse the utility's command line, taken from the 'script_args'
    instance attribute (which defaults to 'sys.argv[1:]').

    This list is first processed for "global options" -- options that
    set attributes of the CMDHelper instance.  Then, it is alternately
    scanned for command line commands and options for that command.
    Each new command terminates the options for the previous command.
    The allowed options for a command are determined by the
    'user_options' attribute of the command class -- thus, we have to
    be able to load command classes in order to parse the command line.
    Any error in that 'options' attribute raises CMDHelperGetoptError;
    any error on the command-line raises CMDHelperArgError.  If no
    cmdhelper commands were found on the command line, raises
    CMDHelperArgError.

    Return true if command-line was successfully parsed and we should
    carry on with executing commands; false if no errors but we
    shouldn't execute commands (currently, this only happens if user
    asks for help).
    """
    toplevel_options = self._get_toplevel_options()

    # We have to parse the command line a bit at a time -- global
    # options, then the first command, then its options, and so on --
    # because each command will be handled by a different class, and
    # the options that are valid for a particular class aren't known
    # until we have loaded the command class, which doesn't happen
    # until we know what the command is.

    self.commands = []
    parser = FancyGetopt(toplevel_options + self.display_options)
    parser.set_negative_aliases(self.negative_opt)
    args = parser.getopt(args=self.script_args, object=self)
    option_order = parser.get_option_order()
    log.set_verbosity(self.verbose)

    # for display options we return immediately
    if self.handle_display_options(option_order):
        return
    # Consume one command (and its options) per iteration.
    while args:
        args = self._parse_command_opts(parser, args)
        if args is None:            # user asked for help (and got it)
            return

    # Handle the cases of --help as a "global" option, ie.
    # "some_utility.py --help" and "some_utility.py --help command ...".
    # For the former, we show global options (--verbose, --dry-run, etc.)
    # and display-only options (--help-commands, etc.); for the
    # latter, we omit the display-only options and show help for
    # each command listed on the command line.
    if self.help or not self.commands:
        self._show_help(parser,
                        display_options=len(self.commands) == 0,
                        commands=self.commands)
        return

    # All is well: return true
    return 1
def compile(self, name):
    """Byte-compile the file ``name`` quietly, after printing a
    progress line to the terminal."""
    native_path = shell.native(name)
    term.write("%(ERASE)s%(BOLD)s>>> Compiling %(name)s...%(NORMAL)s", name=name)
    from distutils import util
    try:
        from distutils import log
    except ImportError:
        # Old distutils without a log module: silence via verbose=0.
        util.byte_compile([native_path], verbose=0, force=True)
    else:
        # Newer distutils: silence through its logging framework instead.
        log.set_verbosity(0)
        util.byte_compile([native_path], force=True)
def __init__(self, verbosity=1, build_base=None):
    """Wrap a fresh distutils Distribution, optionally overriding the
    'build' command's build_base, and record the effective base."""
    from distutils.dist import Distribution
    from distutils import log

    log.set_verbosity(verbosity)

    self._dist = Distribution()
    self._cmds = {}
    self._compilers = {}

    if build_base:
        # Inject the override as if it came from a config file named "bento".
        build_opts = self._dist.get_option_dict("build")
        build_opts["build_base"] = ("bento", build_base)

    # Ask the build command for the base actually in effect (override or
    # distutils default).
    build_cmd = self._dist.get_command_obj("build")
    self._build_base = build_cmd.build_base
def build_and_install(self, setup_script, setup_base):
    """Run the setup script's 'build' command, then move its build
    products into ``install_dir``, replacing any existing entries.
    The log verbosity is restored even when the build fails."""
    command_line = ['build']
    try:
        self.run_setup(setup_script, setup_base, command_line)
        # Descend two levels into the produced tree:
        # <setup_base>/<only-entry>/build/<only-entry>
        first = os.listdir(setup_base)[0]
        setup_base = os.path.join(setup_base, first, 'build')
        second = os.listdir(setup_base)[0]
        setup_base = os.path.join(setup_base, second)
        for entry in os.listdir(setup_base):
            stale = os.path.join(self.install_dir, entry)
            if os.path.exists(stale):
                # Clear out any previous installation of this entry.
                shutil.rmtree(stale)
            shutil.move(os.path.join(setup_base, entry), self.install_dir)
    finally:
        log.set_verbosity(self.verbose)  # restore our log verbosity
def run(self):
    """Collect every test module under tests/ and run the resulting
    suite, then exit."""
    log.set_verbosity(self.verbose)

    # Dotted module names ("tests.<name>") for each test*.py file.
    testfiles = [
        '.'.join(['tests', splitext(basename(path))[0]])
        for path in glob(pjoin(self._dir, 'tests', 'test*.py'))
        if not path.endswith('__init__.py')
    ]
    self.announce("Test files:" + str(testfiles), level=2)

    suite = TestLoader().loadTestsFromNames(testfiles)
    runner = TextTestRunner(verbosity=self.verbose)
    runner.run(suite)
    exit()
def _post_install():
    """Enable the GenePattern Jupyter extensions after installation.

    Best-effort: if anything goes wrong, log manual instructions
    instead of aborting the install.
    """
    import subprocess
    from distutils import log
    log.set_verbosity(log.DEBUG)
    try:
        # Enable the required nbextension for ipywidgets
        subprocess.call(["jupyter", "nbextension", "enable", "--py", "widgetsnbextension"])
        # Enable the GenePattern Notebook extension
        subprocess.call(["jupyter", "nbextension", "install", "--py", "genepattern"])
        subprocess.call(["jupyter", "nbextension", "enable", "--py", "genepattern"])
        subprocess.call(["jupyter", "serverextension", "enable", "--py", "genepattern"])
    except Exception:
        # Fix: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; `except Exception` keeps the deliberate
        # best-effort behaviour without hiding interpreter-level signals.
        log.warn("Unable to automatically enable GenePattern extension for Jupyter.\n" +
                 "Please manually enable the extension by running the following commands:\n" +
                 "jupyter nbextension enable --py widgetsnbextension\n" +
                 "jupyter nbextension install --py genepattern\n" +
                 "jupyter nbextension enable --py genepattern\n" +
                 "jupyter serverextension enable --py genepattern\n")
def parse_command_line(self):
    """Parse the setup script's command line, taken from the
    'script_args' instance attribute (which defaults to 'sys.argv[1:]'
    -- see 'setup()' in core.py).

    This list is first processed for "global options" -- options that
    set attributes of the Distribution instance.  Then, it is
    alternately scanned for Distutils commands and options for that
    command.  Each new command terminates the options for the previous
    command.  The allowed options for a command are determined by the
    'user_options' attribute of the command class -- thus, we have to
    be able to load command classes in order to parse the command line.
    Any error in that 'options' attribute raises DistutilsGetoptError;
    any error on the command-line raises DistutilsArgError.  If no
    Distutils commands were found on the command line, raises
    DistutilsArgError.

    Return true if command-line was successfully parsed and we should
    carry on with executing commands; false if no errors but we
    shouldn't execute commands (currently, this only happens if user
    asks for help).  (Python 2 code: note the `raise E, msg` syntax.)
    """
    toplevel_options = self._get_toplevel_options()
    self.commands = []
    parser = FancyGetopt(toplevel_options + self.display_options)
    parser.set_negative_aliases(self.negative_opt)
    # Accept the historical British spelling as an alias.
    parser.set_aliases({"licence": "license"})
    args = parser.getopt(args=self.script_args, object=self)
    option_order = parser.get_option_order()
    log.set_verbosity(self.verbose)
    # Display-only options (--help-commands etc.) short-circuit parsing.
    if self.handle_display_options(option_order):
        return
    else:
        # Consume one command (plus its options) per iteration.
        while args:
            args = self._parse_command_opts(parser, args)
            if args is None:
                # User asked for help (and got it); nothing more to do.
                return
    if self.help:
        self._show_help(parser,
                        display_options=len(self.commands) == 0,
                        commands=self.commands)
        return
    if not self.commands:
        raise DistutilsArgError, "no commands supplied"
    # All is well: carry on executing commands.
    return 1
def setup(**attrs):
    """Drop-in wrapper around setuptools.setup() that adds an
    autoconf-style 'configure' step with textual substitutions.

    On ``configure``, substitutions are gathered from the command line
    and cached in ``config.cache``; on any other invocation they are
    read back from the cache.  Every setup() argument value is then
    passed through substitute() before being handed to setuptools.
    """
    # NOTE(review): the popped keys are the *plural* 'scripts_name' /
    # 'scripts_args' while the values are forwarded as setuptools'
    # singular script_name / script_args -- confirm the plural spelling
    # is intentional and matches the callers.
    script_name = attrs.pop('scripts_name', os.path.basename(sys.argv[0]))
    script_args = attrs.pop('scripts_args', sys.argv[1:])

    # Some distutils arguments should be recognized
    parser = argparse.ArgumentParser(prog=script_name, add_help=False)
    parser.add_argument('-h', '--help', action='store_true')
    parser.add_argument('--help-commands', action='store_true')
    parser.add_argument('-v', '--verbose', dest='verbosity', action='count', default=1)
    parser.add_argument('-q', '--quiet', dest='verbosity', action='store_const', const=0)
    known_args, ignore = parser.parse_known_args(script_args)
    log.set_verbosity(known_args.verbosity)

    # Skip all substitution work when only help output was requested.
    if not (known_args.help or known_args.help_commands):
        substitutions = attrs.pop('substitutions', {})
        substitutions.update({
            # Autoconf-style metadata
            'PACKAGE_NAME': attrs.get('name'),
            'PACKAGE_VERSION': attrs.get('version'),
            'PACKAGE_AUTHOR': attrs.get('author'),
            'PACKAGE_AUTHOR_EMAIL': attrs.get('author_email'),
            'PACKAGE_LICENSE': attrs.get('license'),
            'PACKAGE_URL': attrs.get('url'),
        })
        cache_filename = 'config.cache'
        if 'configure' in script_args:
            # Configure step: merge command-line substitutions and persist
            # the flattened result for later invocations.
            script_args, cli_substitutions = parse_commandline_substitutions(script_args)
            substitutions.update(cli_substitutions)
            flatten(substitutions)
            write_cache(cache_filename, substitutions)
        else:
            # Ordinary invocation: reuse the cached configure results.
            substitutions.update(parse_cache(cache_filename))
        # Apply the substitutions to every setup() argument value.
        for name, value in attrs.items():
            attrs[name] = substitute(value, substitutions)
        attrs['substitutions'] = substitutions
    return setuptools.setup(script_name=script_name, script_args=script_args, **attrs)
def install():
    # Install the yaru Gwibber plugin system-wide.  (Python 2 script;
    # needs root: it writes under /usr/share and kills gwibber-service.)
    print '\nStarting yaru Gwibber plugin install...'
    print_title('Installing required pyyaru library...')
    easy_install.main(['-U', 'pyyaru'])
    dest_plugin = '/usr/share/gwibber/plugins/yaru/'
    dest_plugin_exists = os.path.exists(dest_plugin)
    if dest_plugin_exists:
        # Wipe a previous installation before copying the new files.
        set_verbosity(1)
        print_title('Previous plugin installation found. Reinstall initiated...')
        remove_tree(dest_plugin, 1)
    copy_tree('gwibber/microblog/plugins/yaru/', dest_plugin, verbose=1)
    copy_tree('ui/', '/usr/share/gwibber/ui/', verbose=1)
    # Restart is left to the desktop session; we only stop the running
    # service so the new plugin is picked up on next launch.
    print_title('Killing gwibber-service...')
    subproc = Popen('killall -v gwibber-service', shell=True)
    subproc.wait()
    print_title('Done.')
def run(self):
    """Find all the test modules in tests/, run them against a freshly
    started test server, and exit when done."""
    log.set_verbosity(self.verbose)

    server = TestServer()
    server.start()
    WebserviceTest.TEST_PORT = server.port
    self.announce("Waiting for test server", level=2)
    time.sleep(1)  # brief pause so the server can begin listening

    # Dotted module names ("tests.<name>") for each test*.py file.
    testfiles = [
        ".".join(["tests", splitext(basename(path))[0]])
        for path in glob(pjoin(self._dir, "tests", "test*.py"))
        if not path.endswith("__init__.py")
    ]
    self.announce("Test files:" + str(testfiles), level=2)

    suite = TestLoader().loadTestsFromNames(testfiles)
    runner = TextTestRunner(verbosity=self.verbose)
    runner.run(suite)
    exit()
def cython(filename, verbose=0, compile_message=False,
           use_cache=False, create_local_c_file=False, annotate=True,
           sage_namespace=True, create_local_so_file=False):
    r"""
    Compile a Cython file.

    This converts a Cython file to a C (or C++ file), and then compiles
    that. The .c file and the .so file are created in a temporary directory.

    INPUT:

    - ``filename`` -- the name of the file to be compiled. Should end
      with 'pyx'.

    - ``verbose`` (integer, default 0) -- level of verbosity. A negative
      value ensures complete silence.

    - ``compile_message`` (bool, default False) -- if True, print
      ``'Compiling <filename>...'`` to the standard error.

    - ``use_cache`` (bool, default False) -- if True, check the temporary
      build directory to see if there is already a corresponding .so
      file. If so, and if the .so file is newer than the Cython file,
      don't recompile, just reuse the .so file.

    - ``create_local_c_file`` (bool, default False) -- if True, save a
      copy of the ``.c`` or ``.cpp`` file in the current directory.

    - ``annotate`` (bool, default True) -- if True, create an html file
      which annotates the conversion from .pyx to .c. By default this is
      only created in the temporary directory, but if
      ``create_local_c_file`` is also True, then save a copy of the
      .html file in the current directory.

    - ``sage_namespace`` (bool, default True) -- if True, import
      ``sage.all``.

    - ``create_local_so_file`` (bool, default False) -- if True, save a
      copy of the compiled .so file in the current directory.

    OUTPUT: a tuple ``(name, dir)`` where ``name`` is the name of the
    compiled module and ``dir`` is the directory containing the
    generated files.

    TESTS:

    Before :trac:`12975`, it would have been needed to write ``#clang
    c++``, but upper case ``C++`` has resulted in an error.
    Using pkgconfig to find the libraries, headers and macros. This is a
    work around while waiting for :trac:`22461` which will offer a
    better solution::

        sage: code = [
        ....: "#clang C++",
        ....: "from sage.rings.polynomial.multi_polynomial_libsingular cimport MPolynomial_libsingular",
        ....: "from sage.libs.singular.polynomial cimport singular_polynomial_pow",
        ....: "def test(MPolynomial_libsingular p):",
        ....: "    singular_polynomial_pow(&p._poly, p._poly, 2, p._parent_ring)"]
        sage: cython(os.linesep.join(code))

    The function ``test`` now manipulates internal C data of
    polynomials, squaring them::

        sage: P.<x,y>=QQ[]
        sage: test(x)
        sage: x
        x^2

    Check that compiling C++ code works::

        sage: cython("# distutils: language = c++\n"+
        ....: "from libcpp.vector cimport vector\n"
        ....: "cdef vector[int] * v = new vector[int](4)\n")

    Check that compiling C++ code works when creating a local C file,
    first moving to a tempdir to avoid clutter.  Before :trac:`22113`,
    the create_local_c_file argument was not tested for C++ code::

        sage: import sage.misc.cython
        sage: d = sage.misc.temporary_file.tmp_dir()
        sage: os.chdir(d)
        sage: with open("test.pyx", 'w') as f:
        ....:     _ = f.write("# distutils: language = c++\n"
        ....:     "from libcpp.vector cimport vector\n"
        ....:     "cdef vector[int] * v = new vector[int](4)\n")
        sage: output = sage.misc.cython.cython("test.pyx", create_local_c_file=True)

    Accessing a ``.pxd`` file from the current directory works::

        sage: import sage.misc.cython
        sage: d = sage.misc.temporary_file.tmp_dir()
        sage: os.chdir(d)
        sage: with open("helper.pxd", 'w') as f:
        ....:     f.write("cdef inline int the_answer(): return 42")
        sage: cython('''
        ....: from helper cimport the_answer
        ....: print(the_answer())
        ....: ''')
        42

    Warning and error messages generated by Cython are properly
    handled. Warnings are only shown if verbose >= 0::

        sage: code = '''
        ....: def test_unreachable():
        ....:     raise Exception
        ....:     return 42
        ....: '''
        sage: cython(code, verbose=-1)
        sage: cython(code, verbose=0)
        warning: ...:4:4: Unreachable code

        sage: cython("foo = bar\n")
        Traceback (most recent call last):
        ...
        RuntimeError: Error compiling Cython file:
        ------------------------------------------------------------
        ...
        foo = bar
             ^
        ------------------------------------------------------------
        <BLANKLINE>
        ...:1:6: undeclared name not builtin: bar

        sage: cython("cdef extern from 'no_such_header_file': pass")
        Traceback (most recent call last):
        ...
        RuntimeError: ...

    Sage used to automatically include various ``.pxi`` files. Since
    :trac:`22805`, we no longer do this. But we make sure to give a
    useful message in case the ``.pxi`` files were needed::

        sage: cython("sig_malloc(0)\n")
        Traceback (most recent call last):
        ...
        RuntimeError: Error compiling Cython file:
        ------------------------------------------------------------
        ...
        sig_malloc(0)
        ^
        ------------------------------------------------------------
        <BLANKLINE>
        ...:1:0: undeclared name not builtin: sig_malloc
        <BLANKLINE>
        NOTE: Sage no longer automatically includes the deprecated files
        "cdefs.pxi", "signals.pxi" and "stdsage.pxi" in Cython files.
        You can fix your code by adding
        "from cysignals.memory cimport sig_malloc".
    """
    if not filename.endswith('pyx'):
        print("Warning: file (={}) should have extension .pyx".format(filename), file=sys.stderr)

    # base is the name of the .so module that we create. If we are
    # creating a local shared object file, we use a more natural
    # naming convention. If we are not creating a local shared object
    # file, the main constraint is that it is unique and determined by
    # the file that we're running Cython on, so that in some cases we
    # can cache the result (e.g., recompiling the same pyx file during
    # the same session).
    if create_local_so_file:
        base, ext = os.path.splitext(os.path.basename(filename))
    else:
        base = os.path.abspath(filename)
    base = sanitize(base)

    # This is the *temporary* directory where we store the pyx file.
    # This is deleted when Sage exits, which means pyx files must be
    # rebuilt every time Sage is restarted at present.
    target_dir = os.path.join(SPYX_TMP, base)

    # Build directory for Cython/distutils
    build_dir = os.path.join(target_dir, "build")

    if os.path.exists(target_dir):
        # There is already a module here. Maybe we do not have to rebuild?
        # Find the name.
        if use_cache:
            from sage.misc.sageinspect import loadable_module_extension
            prev_so = [F for F in os.listdir(target_dir)
                       if F.endswith(loadable_module_extension())]
            if len(prev_so) > 0:
                prev_so = prev_so[0]  # should have length 1 because of deletes below
                if os.path.getmtime(filename) <= os.path.getmtime('%s/%s'%(target_dir, prev_so)):
                    # We do not have to rebuild.
                    return prev_so[:-len(loadable_module_extension())], target_dir

        # Delete all ordinary files in target_dir
        for F in os.listdir(target_dir):
            G = os.path.join(target_dir, F)
            if os.path.isdir(G):
                continue
            try:
                os.unlink(G)
            except OSError:
                pass
    else:
        sage_makedirs(target_dir)

    if create_local_so_file:
        name = base
    else:
        global sequence_number
        if base not in sequence_number:
            sequence_number[base] = 0
        name = '%s_%s'%(base, sequence_number[base])

        # increment the sequence number so will use a different one next time.
        sequence_number[base] += 1

    if compile_message:
        print("Compiling {}...".format(filename), file=sys.stderr)
        sys.stderr.flush()

    with open(filename) as f:
        (preparsed, libs, includes, language, additional_source_files,
         extra_args, libdirs) = _pyx_preparse(f.read())

    # New filename with preparsed code.
    # NOTE: if we ever stop preparsing, we should still copy the
    # original file to the target directory.
    pyxfile = os.path.join(target_dir, name + ".pyx")
    with open(pyxfile, 'w') as f:
        f.write(preparsed)

    extra_sources = []
    for fname in additional_source_files:
        fname = fname.replace("$SAGE_SRC", SAGE_SRC)
        fname = fname.replace("$SAGE_LOCAL", SAGE_LOCAL)
        extra_sources.append(fname)

    # Add current working directory to includes. This is needed because
    # we cythonize from a different directory. See Trac #24764.
    includes.insert(0, os.getcwd())

    # Now do the actual build, directly calling Cython and distutils
    from Cython.Build import cythonize
    from Cython.Compiler.Errors import CompileError
    import Cython.Compiler.Options

    from distutils.dist import Distribution
    from distutils.core import Extension
    from distutils.log import set_verbosity
    set_verbosity(verbose)

    Cython.Compiler.Options.annotate = annotate
    Cython.Compiler.Options.embed_pos_in_docstring = True
    Cython.Compiler.Options.pre_import = "sage.all" if sage_namespace else None

    ext = Extension(name,
                    sources=[pyxfile] + extra_sources,
                    libraries=libs,
                    library_dirs=[os.path.join(SAGE_LOCAL, "lib")] + libdirs,
                    extra_compile_args=extra_args,
                    language=language)

    try:
        # Change directories to target_dir so that Cython produces the correct
        # relative path; https://trac.sagemath.org/ticket/24097
        with restore_cwd(target_dir):
            try:
                ext, = cythonize([ext],
                                 aliases=cython_aliases(),
                                 include_path=includes,
                                 quiet=(verbose <= 0),
                                 errors_to_stderr=False,
                                 use_listing_file=True)
            finally:
                # Read the "listing file" which is the file containing
                # warning and error messages generated by Cython.
                try:
                    with open(name + ".lis") as f:
                        cython_messages = f.read()
                except IOError:
                    cython_messages = "Error compiling Cython file"
    except CompileError:
        # Check for names in old_pxi_names
        for pxd, names in old_pxi_names.items():
            for name in names:
                if re.search(r"\b{}\b".format(name), cython_messages):
                    cython_messages += dedent(
                        """
                        NOTE: Sage no longer automatically includes the deprecated files
                        "cdefs.pxi", "signals.pxi" and "stdsage.pxi" in Cython files.
                        You can fix your code by adding "from {} cimport {}".
                        """.format(pxd, name))
        raise RuntimeError(cython_messages.strip())

    if verbose >= 0:
        sys.stderr.write(cython_messages)
        sys.stderr.flush()

    if create_local_c_file:
        shutil.copy(os.path.join(target_dir, ext.sources[0]), os.curdir)
        if annotate:
            shutil.copy(os.path.join(target_dir, name + ".html"), os.curdir)

    # This emulates running "setup.py build" with the correct options
    dist = Distribution()
    dist.ext_modules = [ext]
    dist.include_dirs = includes
    buildcmd = dist.get_command_obj("build")
    buildcmd.build_base = build_dir
    buildcmd.build_lib = target_dir

    try:
        # Capture errors from distutils and its child processes
        with open(os.path.join(target_dir, name + ".err"), 'w+') as errfile:
            try:
                # Redirect stderr to errfile. We use the file descriptor
                # number "2" instead of "sys.stderr" because we really
                # want to redirect the messages from GCC. These are sent
                # to the actual stderr, regardless of what sys.stderr is.
                sys.stderr.flush()
                with redirection(2, errfile, close=False):
                    dist.run_command("build")
            finally:
                errfile.seek(0)
                distutils_messages = errfile.read()
    except Exception as msg:
        msg = str(msg) + "\n" + distutils_messages
        raise RuntimeError(msg.strip())

    if verbose >= 0:
        sys.stderr.write(distutils_messages)
        sys.stderr.flush()

    if create_local_so_file:
        # Copy module to current directory
        from sage.misc.sageinspect import loadable_module_extension
        shutil.copy(os.path.join(target_dir, name + loadable_module_extension()),
                    os.curdir)

    return name, target_dir
lcmd, modext = munge_command("$^ $(LIBS)", objext, "modext", lcmd) self.reporter.link_command = lcmd self.reporter.modext = modext # The generated Makefile fragment should depend on the physical file for # every Distutils module that has been loaded by this program. def get_fragment_dependencies(): distutils_modules = sorted(m.__file__ for n, m in sys.modules.items() if n.startswith("distutils")) return " \\\n\t".join(distutils_modules) results = stub_build_ext_report() set_verbosity(1) fake_dist = Distribution({"ext_modules": "not empty"}) fake_build_ext = stub_build_ext(results, fake_dist) fake_build_ext.inplace = True fake_build_ext.dry_run = True fake_build_ext.finalize_options() fake_build_ext.run() # Sanity check. if (not results.objext or not results.modext or not results.compile_command or not results.link_command): raise SystemExit("failed to probe compilation environment") with open(sys.argv[1], "w") as f: f.write("""\ O = {objext}
def quiet_log():
    """Reset the log level to quiet (0)."""
    # Running some of the other tests will automatically
    # change the log level to info, messing our output.
    log.set_verbosity(0)
$ python setup.py install ''' import os import stat import platform import subprocess from distutils import log as dlog from distutils.core import setup, Extension from distutils.command import install_lib from os.path import join as pjoin from os.path import relpath as rpath dlog.set_verbosity(dlog.INFO) try: from Cython.Build import cythonize except: print("Please install cython") raise try: import numpy.distutils.misc_util except: print("Please install numpy") raise try: import bitarray
def copyThemes():
    """Copy the bundled ./themes directory into the themes path,
    logging each copied file at INFO level."""
    # Raise verbosity *and* lower the threshold so the per-file copy
    # messages from copy_tree are actually emitted.
    log.set_verbosity(log.INFO)
    log.set_threshold(log.INFO)
    copy_tree('./themes', themesPath, verbose=1)
lcmd, modext = munge_command("$^ $(LIBS)", objext, "modext", lcmd) self.reporter.link_command = lcmd self.reporter.modext = modext # The generated Makefile fragment should depend on the physical file for # every Distutils module that has been loaded by this program. def get_fragment_dependencies(): distutils_modules = sorted(m.__file__ for n, m in sys.modules.items() if n.startswith("distutils")) return " \\\n\t".join(distutils_modules) results = stub_build_ext_report() set_verbosity(1) fake_dist = Distribution({"ext_modules": "not empty"}) fake_build_ext = stub_build_ext(results, fake_dist) fake_build_ext.inplace = True fake_build_ext.dry_run = True fake_build_ext.finalize_options() fake_build_ext.run() # Sanity check. if (not results.objext or not results.modext or not results.compile_command or not results.link_command): raise SystemExit("failed to probe compilation environment") with open(sys.argv[1], "w") as f:
"data_files", "scripts", "py_modules", "dependency_links", "setup_requires", "tests_require", "cmdclass") # setup() arguments that contain boolean values BOOL_FIELDS = ("use_2to3", "zip_safe", "include_package_data") CSV_FIELDS = ("keywords",) log.set_verbosity(log.INFO) def resolve_name(name): """Resolve a name like ``module.object`` to an object and return it. Raise ImportError if the module or name is not found. """ parts = name.split('.') cursor = len(parts) - 1 module_name = parts[:cursor] attr_name = parts[-1] while cursor > 0: try:
def cython(filename, verbose=0, compile_message=False, use_cache=False,
           create_local_c_file=False, annotate=True, sage_namespace=True,
           create_local_so_file=False):
    r"""
    Compile a Cython file. This converts a Cython file to a C (or C++
    file), and then compiles that. The .c file and the .so file are
    created in a temporary directory.

    INPUT:

    - ``filename`` -- the name of the file to be compiled. Should end
      with 'pyx'.

    - ``verbose`` (integer, default 0) -- level of verbosity.

    - ``compile_message`` (bool, default False) -- if True, print
      ``'Compiling <filename>...'`` to the standard error.

    - ``use_cache`` (bool, default False) -- if True, check the
      temporary build directory to see if there is already a
      corresponding .so file. If so, and if the .so file is newer than
      the Cython file, don't recompile, just reuse the .so file.

    - ``create_local_c_file`` (bool, default False) -- if True, save a
      copy of the ``.c`` or ``.cpp`` file in the current directory.

    - ``annotate`` (bool, default True) -- if True, create an html file
      which annotates the conversion from .pyx to .c. By default this
      is only created in the temporary directory, but if
      ``create_local_c_file`` is also True, then save a copy of the
      .html file in the current directory.

    - ``sage_namespace`` (bool, default True) -- if True, import
      ``sage.all``.

    - ``create_local_so_file`` (bool, default False) -- if True, save a
      copy of the compiled .so file in the current directory.

    OUTPUT: a tuple ``(name, target_dir)`` -- the name of the compiled
    module and the directory containing the generated files.

    TESTS:

    Before :trac:`12975`, it would have been needed to write
    ``#clang c++``, but upper case ``C++`` has resulted in an error.
    Using pkgconfig to find the libraries, headers and macros. This is
    a work around while waiting for :trac:`22461` which will offer a
    better solution::

        sage: code = [
        ....: "#clang C++",
        ....: "from sage.rings.polynomial.multi_polynomial_libsingular cimport MPolynomial_libsingular",
        ....: "from sage.libs.singular.polynomial cimport singular_polynomial_pow",
        ....: "def test(MPolynomial_libsingular p):",
        ....: "    singular_polynomial_pow(&p._poly, p._poly, 2, p._parent_ring)"]
        sage: cython(os.linesep.join(code))

    The function ``test`` now manipulates internal C data of
    polynomials, squaring them::

        sage: P.<x,y>=QQ[]
        sage: test(x)
        sage: x
        x^2

    Check that compiling C++ code works::

        sage: cython("# distutils: language = c++\n"+
        ....: "from libcpp.vector cimport vector\n"
        ....: "cdef vector[int] * v = new vector[int](4)\n")

    Check that compiling C++ code works when creating a local C file,
    first moving to a tempdir to avoid clutter. Before :trac:`22113`,
    the create_local_c_file argument was not tested for C++ code::

        sage: import sage.misc.cython
        sage: d = sage.misc.temporary_file.tmp_dir()
        sage: os.chdir(d)
        sage: with open("test.pyx", 'w') as f:
        ....:     _ = f.write("# distutils: language = c++\n"
        ....:       "from libcpp.vector cimport vector\n"
        ....:       "cdef vector[int] * v = new vector[int](4)\n")
        sage: output = sage.misc.cython.cython("test.pyx", create_local_c_file=True)

    Sage used to automatically include various ``.pxi`` files. Since
    :trac:`22805`, we no longer do this. But we make sure to give a
    useful message in case the ``.pxi`` files were needed::

        sage: cython("sig_malloc(0)")
        Traceback (most recent call last):
        ...
        RuntimeError: Error converting ... to C
        NOTE: Sage no longer automatically includes the deprecated files
        "cdefs.pxi", "signals.pxi" and "stdsage.pxi" in Cython files.
        You can fix your code by adding "from cysignals.memory cimport sig_malloc".
    """
    if not filename.endswith('pyx'):
        print("Warning: file (={}) should have extension .pyx".format(filename),
              file=sys.stderr)

    # base is the name of the .so module that we create. If we are
    # creating a local shared object file, we use a more natural
    # naming convention. If we are not creating a local shared object
    # file, the main constraint is that it is unique and determined by
    # the file that we're running Cython on, so that in some cases we
    # can cache the result (e.g., recompiling the same pyx file during
    # the same session).
    if create_local_so_file:
        base, ext = os.path.splitext(os.path.basename(filename))
        base = sanitize(base)
    else:
        base = sanitize(os.path.abspath(filename))

    # This is the *temporary* directory where we store the pyx file.
    # This is deleted when Sage exits, which means pyx files must be
    # rebuilt every time Sage is restarted at present.
    target_dir = os.path.join(SPYX_TMP, base)

    # Build directory for Cython/distutils
    build_dir = os.path.join(target_dir, "build")

    if os.path.exists(target_dir):
        # There is already a module here. Maybe we do not have to rebuild?
        # Find the name.
        if use_cache:
            from sage.misc.sageinspect import loadable_module_extension
            prev_so = [F for F in os.listdir(target_dir)
                       if F.endswith(loadable_module_extension())]
            if len(prev_so) > 0:
                prev_so = prev_so[0]  # should have length 1 because of deletes below
                # Reuse the existing .so only if it is at least as new as
                # the source file.
                if os.path.getmtime(filename) <= os.path.getmtime(
                        '%s/%s' % (target_dir, prev_so)):
                    # We do not have to rebuild.
                    return prev_so[:-len(loadable_module_extension())], target_dir

        # Delete all ordinary files in target_dir
        for F in os.listdir(target_dir):
            G = os.path.join(target_dir, F)
            if os.path.isdir(G):
                continue
            try:
                os.unlink(G)
            except OSError:
                # Best-effort cleanup; a file we cannot remove is not fatal.
                pass
    else:
        sage_makedirs(target_dir)

    if create_local_so_file:
        name = base
    else:
        # Module names must be unique within a session; suffix with a
        # per-base counter held in the module-level ``sequence_number``.
        global sequence_number
        if base not in sequence_number:
            sequence_number[base] = 0
        name = '%s_%s' % (base, sequence_number[base])

        # increment the sequence number so will use a different one next time.
        sequence_number[base] += 1

    if compile_message:
        print("Compiling {}...".format(filename), file=sys.stderr)

    with open(filename) as f:
        (preparsed, libs, includes, language, additional_source_files,
         extra_args, libdirs) = _pyx_preparse(f.read())

    # New filename with preparsed code.
    # NOTE: if we ever stop preparsing, we should still copy the
    # original file to the target directory.
    pyxfile = os.path.join(target_dir, name + ".pyx")
    with open(pyxfile, 'w') as f:
        f.write(preparsed)

    # Expand the $SAGE_SRC / $SAGE_LOCAL placeholders allowed in
    # "# distutils: sources = ..." declarations.
    extra_sources = []
    for fname in additional_source_files:
        fname = fname.replace("$SAGE_SRC", SAGE_SRC)
        fname = fname.replace("$SAGE_LOCAL", SAGE_LOCAL)
        extra_sources.append(fname)

    # Now do the actual build, directly calling Cython and distutils
    from Cython.Build import cythonize
    from Cython.Compiler.Errors import CompileError
    import Cython.Compiler.Options
    from distutils.dist import Distribution
    from distutils.core import Extension
    from distutils.log import set_verbosity
    set_verbosity(verbose)

    Cython.Compiler.Options.annotate = annotate
    Cython.Compiler.Options.embed_pos_in_docstring = True
    Cython.Compiler.Options.pre_import = "sage.all" if sage_namespace else None

    ext = Extension(name,
                    sources=[pyxfile] + extra_sources,
                    libraries=libs,
                    library_dirs=[os.path.join(SAGE_LOCAL, "lib")] + libdirs,
                    extra_compile_args=extra_args,
                    language=language)

    orig_cwd = os.getcwd()
    try:
        # Change directories to target_dir so that Cython produces the correct
        # relative path; https://trac.sagemath.org/ticket/24097
        os.chdir(target_dir)
        ext, = cythonize([ext],
                         aliases=cython_aliases(),
                         include_path=includes,
                         quiet=not verbose)
    except CompileError:
        # Check for names in old_pxi_names: if the failing code uses a
        # symbol that used to come from an auto-included .pxi file, tell
        # the user which cimport now provides it.
        note = ''
        for pxd, names in old_pxi_names.items():
            for name in names:
                if re.search(r"\b{}\b".format(name), preparsed):
                    note += dedent(
                        """
                        NOTE: Sage no longer automatically includes the deprecated files
                        "cdefs.pxi", "signals.pxi" and "stdsage.pxi" in Cython files.
                        You can fix your code by adding "from {} cimport {}".
                        """.format(pxd, name))
        raise RuntimeError("Error converting {} to C".format(filename) + note)
    finally:
        os.chdir(orig_cwd)

    if create_local_c_file:
        shutil.copy(os.path.join(target_dir, ext.sources[0]), os.curdir)
        if annotate:
            shutil.copy(os.path.join(target_dir, name + ".html"), os.curdir)

    # This emulates running "setup.py build" with the correct options
    dist = Distribution()
    dist.ext_modules = [ext]
    dist.include_dirs = includes
    buildcmd = dist.get_command_obj("build")
    buildcmd.build_base = build_dir
    buildcmd.build_lib = target_dir
    dist.run_command("build")

    if create_local_so_file:
        # Copy module to current directory
        from sage.misc.sageinspect import loadable_module_extension
        shutil.copy(os.path.join(target_dir, name + loadable_module_extension()),
                    os.curdir)

    return name, target_dir
EB_VERSION = '.'.join(VERSION.split('.')[0:2]) suff = '' rc_regexp = re.compile("^.*(rc[0-9]*)$") res = rc_regexp.search(str(VERSION)) if res: suff = res.group(1) dev_regexp = re.compile("^.*[0-9]dev$") if dev_regexp.match(VERSION): suff = 'dev' API_VERSION += suff EB_VERSION += suff # log levels: 0 = WARN (default), 1 = INFO, 2 = DEBUG log.set_verbosity(1) # try setuptools, fall back to distutils if needed try: from setuptools import setup log.info("Installing with setuptools.setup...") install_package = 'setuptools' except ImportError, err: log.info( "Failed to import setuptools.setup (%s), so falling back to distutils.setup" % err) from distutils.core import setup install_package = 'distutils'
else: return GnuFCompiler.get_flags_opt(self) def _can_target(cmd, arch): """Return true is the command supports the -arch flag for the given architecture.""" newcmd = cmd[:] newcmd.extend(["-arch", arch, "-v"]) p = Popen(newcmd, stderr=STDOUT, stdout=PIPE) stdout, stderr = p.communicate() if p.returncode == 0: for line in stdout.splitlines(): m = re.search(_R_ARCHS[arch], line) if m: return True return False if __name__ == '__main__': from distutils import log log.set_verbosity(2) compiler = GnuFCompiler() compiler.customize() print compiler.get_version() raw_input('Press ENTER to continue...') try: compiler = Gnu95FCompiler() compiler.customize() print compiler.get_version() except Exception, msg: print msg raw_input('Press ENTER to continue...')
from distutils import log

import pkg_resources
from setuptools.command import easy_install
from setuptools.command import egg_info
from setuptools.command import install
from setuptools.command import install_scripts
from setuptools.command import sdist

# Prefer the C implementation of StringIO on Python 2; fall back to the
# ``io`` module (Python 3, or when cStringIO is unavailable).
try:
    import cStringIO as io
except ImportError:
    import io

from pbr import extra_files

# Show INFO-level distutils messages (file copies, command progress, ...).
log.set_verbosity(log.INFO)

# String values recognised as boolean "true".
# NOTE(review): the comparison site is not visible in this chunk --
# presumably values are lower-cased before the membership test; confirm
# against the callers.
TRUE_VALUES = ('true', '1', 'yes')
# Candidate locations of the runtime requirements list, in priority order.
REQUIREMENTS_FILES = ('requirements.txt', 'tools/pip-requires')
# Candidate locations of the test-only requirements list.
TEST_REQUIREMENTS_FILES = ('test-requirements.txt', 'tools/test-requires')

# part of the standard library starting with 2.7
# adding it to the requirements list screws distro installs
BROKEN_ON_27 = ('argparse', 'importlib')


def get_requirements_files():
    """Return the tuple of requirements-file paths to consult.

    The ``PBR_REQUIREMENTS_FILES`` environment variable (a
    comma-separated list of paths, whitespace around each entry is
    stripped) overrides the default ``REQUIREMENTS_FILES`` when set and
    non-empty.
    """
    files = os.environ.get("PBR_REQUIREMENTS_FILES")
    if files:
        return tuple(f.strip() for f in files.split(','))
    return REQUIREMENTS_FILES
from subprocess import check_call import os import sys import platform here = os.path.dirname(os.path.abspath(__file__)) node_root = os.path.join(here, 'js') is_repo = os.path.exists(os.path.join(here, '.git')) npm_path = os.pathsep.join([ os.path.join(node_root, 'node_modules', '.bin'), os.environ.get('PATH', os.defpath), ]) from distutils import log log.set_verbosity(log.DEBUG) log.info('setup.py entered') log.info('$PATH=%s' % os.environ['PATH']) LONG_DESCRIPTION = 'first try to wrap up nvd3 pie chart into jupyter notebook widget' def js_prerelease(command, strict=False): """decorator for building minified js/css prior to another command""" class DecoratedCommand(command): def run(self): jsdeps = self.distribution.get_command_obj('jsdeps') if not is_repo and all(os.path.exists(t) for t in jsdeps.targets): # sdist, nothing to do command.run(self) return
# executables=[Executable(script = 'illuminator.py', # base = base, # icon = None, # compress = True, # copyDependentFiles = True, # appendScriptToLibrary = True)] # ) ## distutils import bbfreeze #from distutils.core import setup from setuptools import setup, find_packages from distutils import log log.set_verbosity(20) setup(name='Illuminator', version='1.0', description='Ilululumin', author='BrikSkag', author_email='poseidone', url='http://', #packages=['illuminator'], packages = find_packages(), verbose=20, #scripts=['illuminator.py'] entry_points = {'gui_scripts':'illuminator = illuminator:main'}, package_data={'':['*.ico']} )
from distutils.core import Extension
from distutils.command.build_ext import build_ext
from distutils.dist import Distribution
from distutils.msvccompiler import MSVCCompiler
from distutils import log
import os
import shutil

# Run relative to this script's directory so 'sources' below resolves.
if os.path.dirname(__file__):
    os.chdir(os.path.dirname(__file__))

# Maximum distutils verbosity: echo every compiler command executed.
log.set_verbosity(3)

distribution = Distribution({
    'ext_modules': [Extension('_checker', sources=['_checker.c'])]
})


class Command(build_ext):
    """build_ext variant that replaces distutils' default compiler flags
    with aggressive release flags (MSVC) or native-tuned flags (others)."""

    def build_extensions(self):
        if isinstance(self.compiler, MSVCCompiler):
            # initialize() must run first so compile_options is populated.
            self.compiler.initialize()
            # Drop defaults we replace: /W3 (warning level) and /MD
            # (DLL C runtime); /GS- only if present.
            self.compiler.compile_options.remove('/W3')
            self.compiler.compile_options.remove('/MD')
            if '/GS-' in self.compiler.compile_options:
                self.compiler.compile_options.remove('/GS-')
            # /Ox full optimisation, /W4 strict warnings, /EHsc C++
            # exception handling, /GL whole-program optimisation,
            # /MT static C runtime.
            self.compiler.compile_options += ['/Ox', '/W4', '/EHsc', '/GL', '/MT']
            # /OPT:REF,ICF strips unreferenced/identical code;
            # /LTCG is required to pair with /GL.
            self.compiler.ldflags_shared += ['/OPT:REF,ICF', '/LTCG']
        else:
            # Non-MSVC (gcc/clang): optimise for the build machine's CPU.
            self.distribution.ext_modules[0].extra_compile_args = ['-march=native', '-O3']
        build_ext.build_extensions(self)
# import build and build_ext using a different name, # to allow subclassing them from distutils.command.build import build as _build from distutils.command.build_ext import build_ext as _build_ext #from distutils.command.install import install as _install from distutils.command.install_lib import install_lib as _install_lib # make the build process more silent # (this setting is also passed on to InstallBUFRInterfaceECMWF) VERBOSE = False if not VERBOSE: # set the logging to WARN only from distutils.log import set_verbosity set_verbosity(0) # patch distutils if it can't cope with the "classifiers" or # "download_url" keywords [is this still needed?] if version < '2.2.3': from distutils.dist import DistributionMetadata DistributionMetadata.classifiers = None DistributionMetadata.download_url = None # #] # an alternative might be to use a setup version that seems present # in the numpy module, see: # http://www2-pcmdi.llnl.gov/cdat/tutorials/f2py-wrapping-fortran-code/\ # part-4-packaging-all-this-into-something-that-can-be-distributed-\ # very-advanced # and http://www.scipy.org/Documentation/numpy_distutils
import os import sys import shutil from distutils.core import Extension from distutils.command.build_ext import build_ext from distutils.dist import Distribution from distutils.msvccompiler import MSVCCompiler from distutils import log if os.path.dirname(__file__): os.chdir(os.path.dirname(__file__)) log.set_verbosity(3) distribution = Distribution({ 'ext_modules': [Extension('_checker', sources=['_checker.c'])] }) class Command(build_ext): def build_extensions(self): if isinstance(self.compiler, MSVCCompiler): self.compiler.initialize() self.compiler.compile_options.remove('/W3') self.compiler.compile_options.remove('/MD') if '/GS-' in self.compiler.compile_options: self.compiler.compile_options.remove('/GS-') self.compiler.compile_options += ['/Ox', '/W4', '/EHsc', '/GL', '/MT'] self.compiler.ldflags_shared += ['/OPT:REF,ICF', '/LTCG'] else: if os.uname()[4].startswith('arm') or 'redist' in sys.argv:
def cython(filename, verbose=0, compile_message=False, use_cache=False,
           create_local_c_file=False, annotate=True, sage_namespace=True,
           create_local_so_file=False):
    r"""
    Compile a Cython file. This converts a Cython file to a C (or C++
    file), and then compiles that. The .c file and the .so file are
    created in a temporary directory.

    INPUT:

    - ``filename`` -- the name of the file to be compiled. Should end
      with 'pyx'.

    - ``verbose`` (integer, default 0) -- level of verbosity. A
      negative value ensures complete silence.

    - ``compile_message`` (bool, default False) -- if True, print
      ``'Compiling <filename>...'`` to the standard error.

    - ``use_cache`` (bool, default False) -- if True, check the
      temporary build directory to see if there is already a
      corresponding .so file. If so, and if the .so file is newer than
      the Cython file, don't recompile, just reuse the .so file.

    - ``create_local_c_file`` (bool, default False) -- if True, save a
      copy of the ``.c`` or ``.cpp`` file in the current directory.

    - ``annotate`` (bool, default True) -- if True, create an html file
      which annotates the conversion from .pyx to .c. By default this
      is only created in the temporary directory, but if
      ``create_local_c_file`` is also True, then save a copy of the
      .html file in the current directory.

    - ``sage_namespace`` (bool, default True) -- if True, import
      ``sage.all``.

    - ``create_local_so_file`` (bool, default False) -- if True, save a
      copy of the compiled .so file in the current directory.

    OUTPUT: a tuple ``(name, dir)`` where ``name`` is the name of the
    compiled module and ``dir`` is the directory containing the
    generated files.

    TESTS:

    Before :trac:`12975`, it would have been needed to write
    ``#clang c++``, but upper case ``C++`` has resulted in an error.
    Using pkgconfig to find the libraries, headers and macros. This is
    a work around while waiting for :trac:`22461` which will offer a
    better solution::

        sage: code = [
        ....: "#clang C++",
        ....: "from sage.rings.polynomial.multi_polynomial_libsingular cimport MPolynomial_libsingular",
        ....: "from sage.libs.singular.polynomial cimport singular_polynomial_pow",
        ....: "def test(MPolynomial_libsingular p):",
        ....: "    singular_polynomial_pow(&p._poly, p._poly, 2, p._parent_ring)"]
        sage: cython(os.linesep.join(code))

    The function ``test`` now manipulates internal C data of
    polynomials, squaring them::

        sage: P.<x,y>=QQ[]
        sage: test(x)
        sage: x
        x^2

    Check that compiling C++ code works::

        sage: cython("# distutils: language = c++\n"+
        ....: "from libcpp.vector cimport vector\n"
        ....: "cdef vector[int] * v = new vector[int](4)\n")

    Check that compiling C++ code works when creating a local C file,
    first moving to a tempdir to avoid clutter. Before :trac:`22113`,
    the create_local_c_file argument was not tested for C++ code::

        sage: d = sage.misc.temporary_file.tmp_dir()
        sage: os.chdir(d)
        sage: with open("test.pyx", 'w') as f:
        ....:     _ = f.write("# distutils: language = c++\n"
        ....:       "from libcpp.vector cimport vector\n"
        ....:       "cdef vector[int] * v = new vector[int](4)\n")
        sage: output = sage.misc.cython.cython("test.pyx", create_local_c_file=True)

    Accessing a ``.pxd`` file from the current directory works::

        sage: d = sage.misc.temporary_file.tmp_dir()
        sage: os.chdir(d)
        sage: with open("helper.pxd", 'w') as f:
        ....:     _ = f.write("cdef inline int the_answer(): return 42")
        sage: cython('''
        ....: from helper cimport the_answer
        ....: print(the_answer())
        ....: ''')
        42

    Warning and error messages generated by Cython are properly
    handled. Warnings are only shown if verbose >= 0::

        sage: code = '''
        ....: def test_unreachable():
        ....:     raise Exception
        ....:     return 42
        ....: '''
        sage: cython(code, verbose=-1)
        sage: cython(code, verbose=0)
        warning: ...:4:4: Unreachable code

        sage: cython("foo = bar\n")
        Traceback (most recent call last):
        ...
        RuntimeError: Error compiling Cython file:
        ------------------------------------------------------------
        ...
        foo = bar
             ^
        ------------------------------------------------------------
        <BLANKLINE>
        ...:1:6: undeclared name not builtin: bar

        sage: cython("cdef extern from 'no_such_header_file': pass")
        Traceback (most recent call last):
        ...
        RuntimeError: ...
    """
    if not filename.endswith('pyx'):
        print(
            "Warning: file (={}) should have extension .pyx".format(filename),
            file=sys.stderr)

    # base is the name of the .so module that we create. If we are
    # creating a local shared object file, we use a more natural
    # naming convention. If we are not creating a local shared object
    # file, the main constraint is that it is unique and determined by
    # the file that we're running Cython on, so that in some cases we
    # can cache the result (e.g., recompiling the same pyx file during
    # the same session).
    if create_local_so_file:
        base, ext = os.path.splitext(os.path.basename(filename))
    else:
        base = os.path.abspath(filename)
    base = sanitize(base)

    # This is the *temporary* directory where we store the pyx file.
    # This is deleted when Sage exits, which means pyx files must be
    # rebuilt every time Sage is restarted at present.
    target_dir = os.path.join(SPYX_TMP, base)

    # Build directory for Cython/distutils
    build_dir = os.path.join(target_dir, "build")

    if os.path.exists(target_dir):
        # There is already a module here. Maybe we do not have to rebuild?
        # Find the name.
        if use_cache:
            from sage.misc.sageinspect import loadable_module_extension
            prev_so = [F for F in os.listdir(target_dir)
                       if F.endswith(loadable_module_extension())]
            if len(prev_so) > 0:
                prev_so = prev_so[0]  # should have length 1 because of deletes below
                # Reuse the existing .so only if it is at least as new as
                # the source file.
                if os.path.getmtime(filename) <= os.path.getmtime(
                        '%s/%s' % (target_dir, prev_so)):
                    # We do not have to rebuild.
                    return prev_so[:-len(loadable_module_extension())], target_dir

        # Delete all ordinary files in target_dir
        for F in os.listdir(target_dir):
            G = os.path.join(target_dir, F)
            if os.path.isdir(G):
                continue
            try:
                os.unlink(G)
            except OSError:
                # Best-effort cleanup; a file we cannot remove is not fatal.
                pass
    else:
        sage_makedirs(target_dir)

    if create_local_so_file:
        name = base
    else:
        # Module names must be unique within a session; suffix with a
        # per-base counter held in the module-level ``sequence_number``.
        global sequence_number
        if base not in sequence_number:
            sequence_number[base] = 0
        name = '%s_%s' % (base, sequence_number[base])

        # increment the sequence number so will use a different one next time.
        sequence_number[base] += 1

    if compile_message:
        sys.stderr.write("Compiling {}...\n".format(filename))
        sys.stderr.flush()

    # Copy original file to the target directory.
    pyxfile = os.path.join(target_dir, name + ".pyx")
    shutil.copy(filename, pyxfile)

    # Add current working directory to includes. This is needed because
    # we cythonize from a different directory. See Trac #24764.
    includes = [os.getcwd()] + sage_include_directories()

    # Now do the actual build, directly calling Cython and distutils
    from Cython.Build import cythonize
    from Cython.Compiler.Errors import CompileError
    import Cython.Compiler.Options
    from distutils.dist import Distribution
    from distutils.core import Extension
    from distutils.log import set_verbosity
    set_verbosity(verbose)

    Cython.Compiler.Options.annotate = annotate
    Cython.Compiler.Options.embed_pos_in_docstring = True
    Cython.Compiler.Options.pre_import = "sage.all" if sage_namespace else None

    ext = Extension(
        name,
        sources=[pyxfile],
        extra_compile_args=["-w"],  # no warnings
        libraries=standard_libs,
        library_dirs=standard_libdirs)

    directives = dict(language_level=sys.version_info[0])

    try:
        # Change directories to target_dir so that Cython produces the correct
        # relative path; https://trac.sagemath.org/ticket/24097
        with restore_cwd(target_dir):
            try:
                ext, = cythonize([ext],
                                 aliases=cython_aliases(),
                                 include_path=includes,
                                 compiler_directives=directives,
                                 quiet=(verbose <= 0),
                                 errors_to_stderr=False,
                                 use_listing_file=True)
            finally:
                # Read the "listing file" which is the file containing
                # warning and error messages generated by Cython.
                try:
                    with open(name + ".lis") as f:
                        cython_messages = f.read()
                except IOError:
                    # No listing file produced; fall back to a generic message.
                    cython_messages = "Error compiling Cython file"
    except CompileError:
        raise RuntimeError(cython_messages.strip())

    # Warnings are only surfaced when not silenced (verbose < 0).
    if verbose >= 0:
        sys.stderr.write(cython_messages)
        sys.stderr.flush()

    if create_local_c_file:
        shutil.copy(os.path.join(target_dir, ext.sources[0]), os.curdir)
        if annotate:
            shutil.copy(os.path.join(target_dir, name + ".html"), os.curdir)

    # This emulates running "setup.py build" with the correct options
    dist = Distribution()
    dist.ext_modules = [ext]
    dist.include_dirs = includes
    buildcmd = dist.get_command_obj("build")
    buildcmd.build_base = build_dir
    buildcmd.build_lib = target_dir

    try:
        # Capture errors from distutils and its child processes
        with open(os.path.join(target_dir, name + ".err"), 'w+') as errfile:
            try:
                # Redirect stderr to errfile. We use the file descriptor
                # number "2" instead of "sys.stderr" because we really
                # want to redirect the messages from GCC. These are sent
                # to the actual stderr, regardless of what sys.stderr is.
                sys.stderr.flush()
                with redirection(2, errfile, close=False):
                    dist.run_command("build")
            finally:
                errfile.seek(0)
                distutils_messages = errfile.read()
    except Exception as msg:
        msg = str(msg) + "\n" + distutils_messages
        raise RuntimeError(msg.strip())

    if verbose >= 0:
        sys.stderr.write(distutils_messages)
        sys.stderr.flush()

    if create_local_so_file:
        # Copy module to current directory
        from sage.misc.sageinspect import loadable_module_extension
        shutil.copy(
            os.path.join(target_dir, name + loadable_module_extension()),
            os.curdir)

    return name, target_dir
total_size = str(total_size) + " gigabytes" elif total_size >= megabyte: total_size = total_size / megabyte #Get MB value total_size = round(total_size, 2) #Round to 2 d.p. total_size = str(total_size) + " megabytes" elif total_size >= kilobyte: total_size = total_size / kilobyte #Get KB value total_size = round(total_size, 2) #Round to 2 d.p. total_size = str(total_size) + " kilobytes" else: total_size = str(total_size) + " bytes" print("Directory size: " + str(total_size) + " (" + str(bytes_size) + " bytes)") except Exception as Error: print("Error: " + str(Error)) if success == False: pass #copy module if copy == True: fromDirectory = input("From directory: ") toDirectory = input("To directory: ") #fromDirectory = str(fromDirectory).replace("\\", "\\\\") #toDirectory = str(toDirectory).replace("\\", "\\\\") #distutils is okay try: print(log.set_verbosity(log.INFO)) print(log.set_threshold(log.INFO)) copy_tree(fromDirectory, toDirectory) except Exception as Error: print("Error: " + str(Error))
def main():
    """Build the OpenSSL 'lib' target for the configuration and platform
    given on the command line.

    sys.argv[1] is the configuration ("Release"; "Debug" is rejected) and
    sys.argv[2] the platform ("Win32" or "x64"). Exits the process with a
    non-zero status on any failure.
    """
    if sys.argv[1] == "Debug":
        print("OpenSSL debug builds aren't supported.")
    elif sys.argv[1] != "Release":
        raise ValueError('Unrecognized configuration: %s' % sys.argv[1])

    # Map the VS platform name to OpenSSL's target name and the makefile
    # suffix used below.
    if sys.argv[2] == "Win32":
        platform = "VC-WIN32"
        suffix = '32'
    elif sys.argv[2] == "x64":
        platform = "VC-WIN64A"
        suffix = '64'
    else:
        raise ValueError('Unrecognized platform: %s' % sys.argv[2])

    # Have the distutils functions display information output
    log.set_verbosity(1)

    # Use the same properties that are used in the VS projects
    solution_dir = os.path.dirname(__file__)
    propfile = os.path.join(solution_dir, 'pyproject.vsprops')
    props = get_project_properties(propfile)

    # Ensure we have the necessary external depenedencies
    ssl_dir = os.path.join(solution_dir, props['opensslDir'])
    if not os.path.isdir(ssl_dir):
        print("Could not find the OpenSSL sources, try running "
              "'build.bat -e'")
        sys.exit(1)

    # Ensure the executables used herein are available.
    if not find_executable('nmake.exe'):
        print('Could not find nmake.exe, try running env.bat')
        sys.exit(1)

    # add our copy of NASM to PATH. It will be on the same level as openssl
    externals_dir = os.path.join(solution_dir, props['externalsDir'])
    for dir in os.listdir(externals_dir):
        if dir.startswith('nasm'):
            nasm_dir = os.path.join(externals_dir, dir)
            nasm_dir = os.path.abspath(nasm_dir)
            old_path = os.environ['PATH']
            os.environ['PATH'] = os.pathsep.join([nasm_dir, old_path])
            break
    else:
        # for/else: no bundled nasm directory found -- a nasm on PATH is
        # acceptable as a fallback.
        if not find_executable('nasm.exe'):
            print('Could not find nasm.exe, please add to PATH')
            sys.exit(1)

    # If the ssl makefiles do not exist, we invoke PCbuild/prepare_ssl.py
    # to generate them.
    platform_makefile = MK1MF_FMT.format(suffix)
    if not os.path.isfile(os.path.join(ssl_dir, platform_makefile)):
        pcbuild_dir = os.path.join(os.path.dirname(externals_dir), 'PCbuild')
        prepare_ssl = os.path.join(pcbuild_dir, 'prepare_ssl.py')
        rc = subprocess.call([sys.executable, prepare_ssl, ssl_dir])
        if rc:
            print('Executing', prepare_ssl, 'failed (error %d)' % rc)
            sys.exit(rc)

    old_cd = os.getcwd()
    try:
        os.chdir(ssl_dir)

        # Get the variables defined in the current makefile, if it exists.
        makefile = MK1MF_FMT.format('')
        try:
            makevars = parse_makefile(makefile)
        except EnvironmentError:
            # No makefile yet; force the rebuild branch below.
            makevars = {'PLATFORM': None}

        # Rebuild the makefile when building for different a platform than
        # the last run.
        if makevars['PLATFORM'] != platform:
            print("Updating the makefile...")
            sys.stdout.flush()
            # Firstly, apply the changes for the platform makefile into
            # a temporary file to prevent any errors from this script
            # causing false positives on subsequent runs.
            new_makefile = makefile + '.new'
            fix_makefile(new_makefile, platform_makefile, suffix)
            makevars = parse_makefile(new_makefile)

            # Secondly, perform the make recipes that use Perl
            copy_files(new_makefile, makevars)

            # Set our build information in buildinf.h.
            # XXX: This isn't needed for a properly "prepared" SSL, but
            # it fixes the current checked-in external (as of 2017-05).
            fix_buildinf(makevars)

            # Finally, move the temporary file to its real destination.
            if os.path.exists(makefile):
                os.remove(makefile)
            os.rename(new_makefile, makefile)

        # Now run make.
        makeCommand = "nmake /nologo /f \"%s\" lib" % makefile
        print("Executing ssl makefiles:", makeCommand)
        sys.stdout.flush()
        rc = os.system(makeCommand)
        if rc:
            print("Executing", makefile, "failed (error %d)" % rc)
            sys.exit(rc)
    finally:
        os.chdir(old_cd)
    # Propagate the (successful, i.e. zero) nmake status as our own.
    sys.exit(rc)
    def parse_command_line(self):
        """Parse the setup script's command line, taken from the
        'script_args' instance attribute (which defaults to 'sys.argv[1:]'
        -- see 'setup()' in core.py).  This list is first processed for
        "global options" -- options that set attributes of the Distribution
        instance.  Then, it is alternately scanned for Distutils commands
        and options for that command.  Each new command terminates the
        options for the previous command.  The allowed options for a
        command are determined by the 'user_options' attribute of the
        command class -- thus, we have to be able to load command classes
        in order to parse the command line.  Any error in that 'options'
        attribute raises DistutilsGetoptError; any error on the
        command-line raises DistutilsArgError.  If no Distutils commands
        were found on the command line, raises DistutilsArgError.  Return
        true if command-line was successfully parsed and we should carry on
        with executing commands; false if no errors but we shouldn't
        execute commands (currently, this only happens if user asks for
        help).
        """
        #
        # We now have enough information to show the Macintosh dialog
        # that allows the user to interactively specify the "command line".
        #
        toplevel_options = self._get_toplevel_options()

        # We have to parse the command line a bit at a time -- global
        # options, then the first command, then its options, and so on --
        # because each command will be handled by a different class, and
        # the options that are valid for a particular class aren't known
        # until we have loaded the command class, which doesn't happen
        # until we know what the command is.

        self.commands = []
        parser = FancyGetopt(toplevel_options + self.display_options)
        parser.set_negative_aliases(self.negative_opt)
        # Accept the British spelling as an alias for --license.
        parser.set_aliases({'licence': 'license'})
        args = parser.getopt(args=self.script_args, object=self)
        option_order = parser.get_option_order()
        # getopt() stored the global -v/--verbose count on self; apply it.
        log.set_verbosity(self.verbose)

        # for display options we return immediately
        if self.handle_display_options(option_order):
            return
        # Consume one command (plus its options) per iteration until the
        # argument list is exhausted.
        while args:
            args = self._parse_command_opts(parser, args)
            if args is None:            # user asked for help (and got it)
                return

        # Handle the cases of --help as a "global" option, ie.
        # "setup.py --help" and "setup.py --help command ...".  For the
        # former, we show global options (--verbose, --dry-run, etc.)
        # and display-only options (--name, --version, etc.); for the
        # latter, we omit the display-only options and show help for
        # each command listed on the command line.
        if self.help:
            self._show_help(parser,
                            display_options=len(self.commands) == 0,
                            commands=self.commands)
            return

        # Oops, no commands found -- an end-user error
        if not self.commands:
            raise DistutilsArgError, "no commands supplied"

        # All is well: return true
        return 1
'compiler_fix' : ["f90"], 'compiler_f90' : ["f90"], 'linker_so' : None, 'archiver' : ["ar", "-cr"], 'ranlib' : ["ranlib"] } module_dir_switch = None #XXX: fix me module_include_switch = None #XXX: fix me pic_flags = ['+pic=long'] def get_flags(self): return self.pic_flags + ['+ppu', '+DD64'] def get_flags_opt(self): return ['-O3'] def get_libraries(self): return ['m'] def get_library_dirs(self): opt = ['/usr/lib/hpux64'] return opt def get_version(self, force=0, ok_status=[256,0,1]): # XXX status==256 may indicate 'unrecognized option' or # 'no input file'. So, version_cmd needs more work. return FCompiler.get_version(self,force,ok_status) if __name__ == '__main__': from distutils import log log.set_verbosity(10) from numpy.distutils.fcompiler import new_fcompiler compiler = new_fcompiler(compiler='hpux') compiler.customize() print compiler.get_version()
# import build and build_ext using a different name, # to allow subclassing them from distutils.command.build import build as _build from distutils.command.build_ext import build_ext as _build_ext #from distutils.command.install import install as _install from distutils.command.install_lib import install_lib as _install_lib # make the build process more silent # (this setting is also passed on to InstallBUFRInterfaceECMWF) verbose = False if not verbose: # set the logging to WARN only from distutils.log import set_verbosity set_verbosity(0) # patch distutils if it can't cope with the "classifiers" or # "download_url" keywords from sys import version if version < '2.2.3': from distutils.dist import DistributionMetadata DistributionMetadata.classifiers = None DistributionMetadata.download_url = None # #] # an alternative might be to use a setup version that seems present # in the numpy module, see: # http://www2-pcmdi.llnl.gov/cdat/tutorials/f2py-wrapping-fortran-code/\ # part-4-packaging-all-this-into-something-that-can-be-distributed-\ # very-advanced
def parse_command_line (self):
    """Parse the setup script's command line, taken from the
    'script_args' instance attribute (which defaults to 'sys.argv[1:]'
    -- see 'setup()' in core.py).  This list is first processed for
    "global options" -- options that set attributes of the Distribution
    instance.  Then, it is alternately scanned for Distutils commands
    and options for that command.  Each new command terminates the
    options for the previous command.  The allowed options for a
    command are determined by the 'user_options' attribute of the
    command class -- thus, we have to be able to load command classes
    in order to parse the command line.  Any error in that 'options'
    attribute raises DistutilsGetoptError; any error on the
    command-line raises DistutilsArgError.  If no Distutils commands
    were found on the command line, raises DistutilsArgError.  Return
    true if command-line was successfully parsed and we should carry
    on with executing commands; false if no errors but we shouldn't
    execute commands (currently, this only happens if user asks for
    help).
    """
    #
    # We now have enough information to show the Macintosh dialog
    # that allows the user to interactively specify the "command line".
    #
    toplevel_options = self._get_toplevel_options()
    if sys.platform == 'mac':
        # classic MacOS: let the user compose the "command line" in a dialog
        import EasyDialogs
        cmdlist = self.get_command_list()
        self.script_args = EasyDialogs.GetArgv(
            toplevel_options + self.display_options, cmdlist)

    # We have to parse the command line a bit at a time -- global
    # options, then the first command, then its options, and so on --
    # because each command will be handled by a different class, and
    # the options that are valid for a particular class aren't known
    # until we have loaded the command class, which doesn't happen
    # until we know what the command is.

    self.commands = []
    parser = FancyGetopt(toplevel_options + self.display_options)
    parser.set_negative_aliases(self.negative_opt)
    # accept the British spelling as an alias
    parser.set_aliases({'licence': 'license'})
    # getopt(object=self) writes parsed global option values straight
    # onto this Distribution instance
    args = parser.getopt(args=self.script_args, object=self)
    option_order = parser.get_option_order()
    log.set_verbosity(self.verbose)

    # for display options we return immediately
    if self.handle_display_options(option_order):
        return
    while args:
        args = self._parse_command_opts(parser, args)
        if args is None:            # user asked for help (and got it)
            return

    # Handle the cases of --help as a "global" option, ie.
    # "setup.py --help" and "setup.py --help command ...". For the
    # former, we show global options (--verbose, --dry-run, etc.)
    # and display-only options (--name, --version, etc.); for the
    # latter, we omit the display-only options and show help for
    # each command listed on the command line.
    if self.help:
        self._show_help(parser,
                        display_options=len(self.commands) == 0,
                        commands=self.commands)
        return

    # Oops, no commands found -- an end-user error
    if not self.commands:
        raise DistutilsArgError, "no commands supplied"   # Python 2 raise syntax (file targets Python 2)

    # All is well: return true
    return 1
# NOTE(review): continuation of an HP-UX Fortran compiler subclass; the
# class header and the opening 'executables = {' lie before this chunk.
# Python 3 variant of the same compiler description (print() below).
        'compiler_fix' : ["f90"],
        'compiler_f90' : ["f90"],
        'linker_so' : ["ld", "-b"],   # HP-UX shared-library link command
        'archiver' : ["ar", "-cr"],
        'ranlib' : ["ranlib"]
        }

    # Switches for Fortran-module output/include directories are unknown.
    module_dir_switch = None #XXX: fix me
    module_include_switch = None #XXX: fix me
    # position-independent-code flag, consumed by get_flags() below
    pic_flags = ['+Z']

    def get_flags(self):
        # PIC plus '+ppu' and 64-bit data model '+DD64'
        return self.pic_flags + ['+ppu', '+DD64']

    def get_flags_opt(self):
        # optimisation flags
        return ['-O3']

    def get_libraries(self):
        # always link against the math library
        return ['m']

    def get_library_dirs(self):
        # 64-bit HP-UX system library directory
        opt = ['/usr/lib/hpux64']
        return opt

    def get_version(self, force=0, ok_status=[256, 0, 1]):
        # XXX status==256 may indicate 'unrecognized option' or
        # 'no input file'. So, version_cmd needs more work.
        # NOTE(review): mutable default 'ok_status' is shared across calls;
        # harmless as long as it is only read.
        return FCompiler.get_version(self, force, ok_status)

if __name__ == '__main__':
    # manual smoke test: build the compiler object and print its version
    from distutils import log
    log.set_verbosity(10)
    from numpy.distutils.fcompiler import new_fcompiler
    compiler = new_fcompiler(compiler='hpux')
    compiler.customize()
    print(compiler.get_version())
# NOTE(review): top-level setup.py fragment; VERSION, API_VERSION and
# EB_VERSION are defined before this chunk.
# Detect a release-candidate suffix on VERSION (e.g. "1.0rc2" -> "rc2").
rc_regexp = re.compile("^.*(rc[0-9]*)$")
res = rc_regexp.search(str(VERSION))
if res:
    suff = res.group(1)

# Detect a development suffix (e.g. "1.0.dev0" -> ".dev0"); matches only a
# single digit after 'dev'.
dev_regexp = re.compile("^.*[0-9](.?dev[0-9])$")
res = dev_regexp.search(VERSION)
if res:
    suff = res.group(1)

# NOTE(review): if VERSION carries neither an rc nor a dev suffix, 'suff'
# is unbound here unless it was initialised before this chunk -- confirm.
API_VERSION += suff
EB_VERSION += suff

# log levels: 0 = WARN (default), 1 = INFO, 2 = DEBUG
log.set_verbosity(1)

# try setuptools, fall back to distutils if needed
try:
    from setuptools import setup
    log.info("Installing with setuptools.setup...")
    install_package = 'setuptools'
except ImportError, err:   # Python 2 except syntax (file targets Python 2)
    log.info("Failed to import setuptools.setup (%s), so falling back to distutils.setup" % err)
    from distutils.core import setup
    install_package = 'distutils'

# utility function to read README file
# NOTE(review): the body of read() continues past this chunk.
def read(fname):
    """Read contents of given file."""
version_pattern = r'G95 \((GCC (?P<gccversion>[\d.]+)|.*?) \(g95 (?P<version>.*)!\) (?P<date>.*)\).*' # $ g95 --version # G95 (GCC 4.0.3 (g95 0.90!) Aug 22 2006) executables = { 'version_cmd' : ["<F90>", "--version"], 'compiler_f77' : ["g95", "-ffixed-form"], 'compiler_fix' : ["g95", "-ffixed-form"], 'compiler_f90' : ["g95"], 'linker_so' : ["<F90>", "-shared"], 'archiver' : ["ar", "-cr"], 'ranlib' : ["ranlib"] } pic_flags = ['-fpic'] module_dir_switch = '-fmod=' module_include_switch = '-I' def get_flags(self): return ['-fno-second-underscore'] def get_flags_opt(self): return ['-O'] def get_flags_debug(self): return ['-g'] if __name__ == '__main__': from distutils import log log.set_verbosity(2) compiler = G95FCompiler() compiler.customize() print(compiler.get_version())
# NOTE(review): top of a Jupyter-widget setup.py (deck.gl wrapper); the body
# of update_package_data continues past this chunk.
from __future__ import print_function
from setuptools import setup, find_packages, Command
from setuptools.command.sdist import sdist
from setuptools.command.build_py import build_py
from setuptools.command.egg_info import egg_info
from distutils.command.install import install
from distutils import log
import os
from shutil import copy
from subprocess import check_call
import sys

# absolute directory containing this setup.py
here = os.path.dirname(os.path.abspath(__file__))

log.set_verbosity(log.DEBUG)
log.info('setup.py entered')
# NOTE(review): raises KeyError if PATH is unset in the environment --
# presumably acceptable for a build script; confirm.
log.info('$PATH=%s' % os.environ['PATH'])

LONG_DESCRIPTION = 'Python wrapper for deck.gl'
# relative path from this file to the JavaScript widget sources
PATH_TO_WIDGET = '../../../modules/jupyter-widget'

node_root = os.path.join(here, PATH_TO_WIDGET)
# prepend the widget's local node_modules/.bin so npm-installed tools
# take precedence over system ones
npm_path = os.pathsep.join([
    os.path.join(node_root, 'node_modules', '.bin'),
    os.environ.get('PATH', os.defpath),
])

def update_package_data(distribution):
def compile_shared(csource, modulename, output_dir):
    """Compile '_testcapi.c' or '_ctypes_test.c'
    into an extension module, and import it.

    The module is built inside *output_dir*, imported (replacing any
    module of the same name in sys.modules), and the build directory's
    name is recorded in a per-source hash file for reuse by later runs.
    """
    here = os.path.dirname(__file__)
    assert output_dir is not None

    from distutils.ccompiler import new_compiler
    from distutils import log
    log.set_verbosity(3)

    cc = new_compiler()
    cc.output_dir = output_dir

    # --- compile the single C source file ---
    include_dir = os.path.join(here, '..', 'include')
    if sys.platform == 'win32':
        cflags = ['-D_CRT_SECURE_NO_WARNINGS']
    else:
        cflags = ['-fPIC', '-Wimplicit-function-declaration']
    objects = cc.compile(
        [os.path.join(here, csource)],
        include_dirs=[include_dir],
        extra_preargs=cflags,
    )
    obj = objects[0]

    # --- work out link options ---
    output_filename = modulename + _get_c_extension_suffix()
    if sys.platform == 'win32':
        libname = 'python{0[0]}{0[1]}'.format(sys.version_info)
        library = os.path.join(here, '..', 'libs', libname)
        if not os.path.exists(library + '.lib'):
            # For a local translation or nightly build
            library = os.path.join(here, '..', 'pypy', 'goal', libname)
        assert os.path.exists(
            library + '.lib'), 'Could not find import library "%s"' % library
        link_libraries = [library, 'oleaut32']
        link_preargs = [
            '/MANIFEST',  # needed for VC10
            '/EXPORT:PyInit_' + modulename,
        ]
    else:
        link_libraries = []
        link_preargs = []

    # --- link the dynamic library ---
    cc.link_shared_object([obj], output_filename,
                          libraries=link_libraries,
                          extra_preargs=link_preargs)

    # Import the freshly built extension; it replaces the original
    # module of the same name in sys.modules.
    fp, filename, description = imp.find_module(modulename, path=[output_dir])
    with fp:
        imp.load_module(modulename, fp, filename, description)

    # If everything went fine up to now, remember this build directory in
    # the hash file for future processes (and so the growing pile of
    # temporary directories can be cleaned up, which is not obvious to do
    # on Windows).  Failure to write the marker is deliberately ignored.
    hashed_fn = _get_hashed_filename(os.path.join(here, csource))
    try:
        with open(hashed_fn, 'w') as f:
            f.write(os.path.basename(output_dir))
    except IOError:
        pass