def finalize_options(self):
    """Finalize the options.

    Derives the bdist output directory, prunes "Programming Language"
    classifiers that do not match the running interpreter, and appends the
    contents of README.txt to the long description.
    """
    if self.bdist_dir is None:
        bdist_base = self.get_finalized_command("bdist").bdist_base
        self.bdist_dir = os.path.join(bdist_base, "dist")
    self.set_undefined_options("bdist", ("dist_dir", "dist_dir"))
    python_version = get_python_version()
    # NOTE(review): get_python_version() returns e.g. "3.9", so [0:2] yields
    # "3." — presumably intended to match classifiers such as
    # "Programming Language :: Python :: 3.9"; confirm this is the intent.
    pyver = python_version[0:2]
    # Change classifiers
    new_classifiers = []
    for classifier in self.distribution.metadata.classifiers:
        # Drop language classifiers for non-matching Python versions.
        if classifier.startswith("Programming Language ::") and \
           pyver not in classifier:
            self.log.info("removing classifier %s" % classifier)
            continue
        new_classifiers.append(classifier)
    self.distribution.metadata.classifiers = new_classifiers
    # Append README.txt (read from the current working directory) to the
    # package long description.
    with open("README.txt", "r") as file_handler:
        license = file_handler.read()
    self.distribution.metadata.long_description += \
        "\n{}".format(license)
    if self.debug:
        self.log.setLevel(logging.DEBUG)
        log.set_threshold(1)  # Set Distutils logging level to DEBUG
def __init__(self, config_file='lambda.json', zip_file=None, with_pyc=False):
    """Initialize the lambda packager from a JSON config file.

    :param config_file: path to the lambda configuration JSON file.
    :param zip_file: output archive name; defaults to "<name>.zip" where
        "name" comes from the config data (falling back to "lambda").
    :param with_pyc: whether compiled .pyc files are kept in the bundle.
    """
    self.config_file = config_file
    self.config_data = {}
    self.with_pyc = with_pyc
    self.venv = None
    self.build = None
    self.sdist = None
    self.files = []
    # Silence distutils chatter during the packaging steps.
    log.set_threshold(log.ERROR)
    self.read_config()
    if zip_file:
        self.zip_file = zip_file
    else:
        self.zip_file = '%s.zip' % self.config_data.get('name', 'lambda')
    self.zip = None
    self.prepare_tox()
    self.prepare_setuptools()
def finalize_options(self):
    """Finalize the options.

    Propagates build options onto the "build_ext" command object; each
    option prefers the explicit command-line value and falls back to the
    environment variable of the same meaning.
    """
    if self.debug:
        self.log.setLevel(logging.DEBUG)
        log.set_threshold(1)  # Set Distutils logging level to DEBUG
    cmd_build_ext = self.distribution.get_command_obj("build_ext")
    cmd_build_ext.with_mysql_capi = (self.with_mysql_capi or
                                     os.environ.get("MYSQL_CAPI"))
    cmd_build_ext.with_openssl_include_dir = (
        self.with_openssl_include_dir or
        os.environ.get("OPENSSL_INCLUDE_DIR"))
    cmd_build_ext.with_openssl_lib_dir = (
        self.with_openssl_lib_dir or os.environ.get("OPENSSL_LIB_DIR"))
    cmd_build_ext.with_protobuf_include_dir = (
        self.with_protobuf_include_dir or
        os.environ.get("PROTOBUF_INCLUDE_DIR"))
    cmd_build_ext.with_protobuf_lib_dir = (
        self.with_protobuf_lib_dir or os.environ.get("PROTOBUF_LIB_DIR"))
    cmd_build_ext.with_protoc = (self.with_protoc or
                                 os.environ.get("PROTOC"))
    cmd_build_ext.extra_compile_args = (
        self.extra_compile_args or os.environ.get("EXTRA_COMPILE_ARGS"))
    cmd_build_ext.extra_link_args = (self.extra_link_args or
                                     os.environ.get("EXTRA_LINK_ARGS"))
    self._copy_vendor_libraries()
def finalize_options(self, *args, **kwargs):
    """Finalize build_ext options and inject OpenMP/FFTW compile and
    link flags into every extension."""
    build_ext.finalize_options(self, *args, **kwargs)
    self.verbose = True
    ## Grab the OpenMP flags
    openmpFlags, openmpLibs = get_openmp()
    ## Grab the FFTW flags
    if self.with_fftw is not None:
        # Explicit FFTW prefix given on the command line.
        fftwFlags = ['-I%s/include' % self.with_fftw,]
        fftwLibs = ['-L%s/lib' % self.with_fftw, '-lfftw3f']
    else:
        fftwFlags, fftwLibs = get_fftw()
    ## Update the extensions with the additional compilier/linker flags
    for ext in self.extensions:
        ### Compiler flags
        # extra_compile_args may be None; TypeError then means "replace".
        for cflags in (openmpFlags, fftwFlags):
            try:
                ext.extra_compile_args.extend( cflags )
            except TypeError:
                ext.extra_compile_args = cflags
        ### Linker flags
        for ldflags in (openmpLibs, fftwLibs):
            try:
                ext.extra_link_args.extend( ldflags )
            except TypeError:
                ext.extra_link_args = ldflags
    ## HACK: Update the log verbosity - for some reason this gets set to
    ## WARN when I replace build_ext
    log.set_threshold(min([log.INFO, log._global_log.threshold]))
def BuildExtension(sources, output_dir, extension_name):
    """Compile *sources* into an extension module *extension_name* placed in
    *output_dir*, then clean up the temporary build directory.

    Fixes over the original: the mkdtemp() directory is always removed and
    the distutils log threshold is restored instead of being left at DEBUG.
    """
    from distutils import log
    from distutils.core import Distribution, Extension
    import os
    import shutil
    import tempfile

    build_dir = tempfile.mkdtemp()
    try:
        # Source file paths must be relative to current path.
        cwd = os.getcwd()
        src_files = [os.path.relpath(filename, cwd) for filename in sources]
        ext = Extension(extension_name, src_files)
        if os.name == 'nt':
            _FixDistutilsMsvcCompiler()
            # VS 2010 does not generate manifest, see
            # http://bugs.python.org/issue4431
            ext.extra_link_args = ['/MANIFEST']
        dist = Distribution({'ext_modules': [ext]})
        old_threshold = log.set_threshold(log.DEBUG)
        try:
            dist.script_args = ['build_ext', '--build-temp', build_dir,
                                '--build-lib', output_dir]
            dist.parse_command_line()
            dist.run_commands()
            dist.script_args = ['clean', '--build-temp', build_dir, '--all']
            dist.parse_command_line()
            dist.run_commands()
        finally:
            # Do not leak the DEBUG threshold into the rest of the process.
            log.set_threshold(old_threshold)
    finally:
        # mkdtemp() is the caller's responsibility to remove.
        shutil.rmtree(build_dir, ignore_errors=True)
def getversion_svn_setuptools(path=None):
    """Get version info for a Subversion checkout using setuptools.

    @param path: directory of the Subversion checkout
    @return:
        - tag (name for the repository),
        - rev (current Subversion revision identifier),
        - date (date of current revision),
        - hash (git hash for the Subversion revision)
    @rtype: C{tuple} of three C{str} and a C{time.struct_time}
    """
    # svn_utils is bound to the ImportError instance when the module failed
    # to import at load time; re-raise it lazily here.
    if isinstance(svn_utils, Exception):
        raise svn_utils
    tag = 'pywikibot-core'
    _program_dir = path or _get_program_dir()
    svninfo = svn_utils.SvnInfo(_program_dir)
    # suppress warning
    old_level = log.set_threshold(log.ERROR)
    rev = svninfo.get_revision()
    log.set_threshold(old_level)
    # Validate the revision before converting it to the "s<rev>" form.
    if not isinstance(rev, int):
        raise TypeError(
            'SvnInfo.get_revision() returned type {0!s}'.format(type(rev)))
    if rev < 0:
        raise ValueError(
            'SvnInfo.get_revision() returned {0:d}'.format(rev))
    if rev == 0:
        raise ParseError('SvnInfo: invalid workarea')
    hsh, date = github_svn_rev2hash(tag, rev)
    rev = 's{0!s}'.format(rev)
    return (tag, rev, date, hsh)
def test_non_ascii(self):
    # Issues #8663, #34421: test that non-encodable text is escaped with
    # backslashreplace error handler and encodable non-ASCII text is
    # output as is.
    for errors in ('strict', 'backslashreplace', 'surrogateescape',
                   'replace', 'ignore'):
        with self.subTest(errors=errors), \
             NamedTemporaryFile("w+", encoding='cp437', errors=errors) as stdout, \
             NamedTemporaryFile("w+", encoding='cp437', errors=errors) as stderr:
            # Lower the threshold so log.debug() actually emits, and
            # restore it so other tests are unaffected.
            old_threshold = log.set_threshold(log.DEBUG)
            try:
                with swap_attr(sys, 'stdout', stdout), \
                     swap_attr(sys, 'stderr', stderr):
                    log.debug('Dεbug\tMėssãge')
                    log.fatal('Fαtal\tÈrrōr')
            finally:
                log.set_threshold(old_threshold)
            # cp437 cannot encode 'ė'/'ã'/'È'/'ō'; the expected output
            # depends on the error handler in effect.
            stdout.seek(0)
            self.assertEqual(
                stdout.read().rstrip(),
                'Dεbug\tM?ss?ge' if errors == 'replace' else
                'Dεbug\tMssge' if errors == 'ignore' else
                'Dεbug\tM\\u0117ss\\xe3ge')
            stderr.seek(0)
            self.assertEqual(
                stderr.read().rstrip(),
                'Fαtal\t?rr?r' if errors == 'replace' else
                'Fαtal\trrr' if errors == 'ignore' else
                'Fαtal\t\\xc8rr\\u014dr')
def test_non_ascii(self):
    # Issues #8663, #34421: test that non-encodable text is escaped with
    # backslashreplace error handler and encodable non-ASCII text is
    # output as is.
    for errors in ('strict', 'backslashreplace', 'surrogateescape',
                   'replace', 'ignore'):
        with self.subTest(errors=errors):
            # In-memory cp437 streams stand in for the real stdio.
            stdout = io.TextIOWrapper(io.BytesIO(),
                                      encoding='cp437', errors=errors)
            stderr = io.TextIOWrapper(io.BytesIO(),
                                      encoding='cp437', errors=errors)
            # Lower the threshold so log.debug() emits; restore afterwards.
            old_threshold = log.set_threshold(log.DEBUG)
            try:
                with swap_attr(sys, 'stdout', stdout), \
                     swap_attr(sys, 'stderr', stderr):
                    log.debug('Dεbug\tMėssãge')
                    log.fatal('Fαtal\tÈrrōr')
            finally:
                log.set_threshold(old_threshold)
            # Expected output depends on which error handler is active.
            stdout.seek(0)
            self.assertEqual(stdout.read().rstrip(),
                             'Dεbug\tM?ss?ge' if errors == 'replace' else
                             'Dεbug\tMssge' if errors == 'ignore' else
                             'Dεbug\tM\\u0117ss\\xe3ge')
            stderr.seek(0)
            self.assertEqual(stderr.read().rstrip(),
                             'Fαtal\t?rr?r' if errors == 'replace' else
                             'Fαtal\trrr' if errors == 'ignore' else
                             'Fαtal\t\\xc8rr\\u014dr')
def refactor_for_py3(distdir, cy3_dir):
    """Convert the Cython sources in *distdir* to Python 3 with 2to3,
    writing the result into *cy3_dir* and prepending it to sys.path."""
    # need to convert Cython sources first
    import lib2to3.refactor
    from distutils.util import copydir_run_2to3
    # Apply every lib2to3 fixer except "next" (handled elsewhere).
    fixers = [fix for fix in
              lib2to3.refactor.get_fixers_from_package("lib2to3.fixes")
              if fix.split('fix_')[-1] not in ('next',)]
    if not os.path.exists(cy3_dir):
        os.makedirs(cy3_dir)
    import distutils.log as dlog
    dlog.set_threshold(dlog.INFO)
    # The MANIFEST-style template selects which files get copied/converted.
    copydir_run_2to3(distdir, cy3_dir, fixer_names=fixers,
                     template = '''
                     global-exclude *
                     graft Cython
                     recursive-exclude Cython *
                     recursive-include Cython *.py *.pyx *.pxd
                     recursive-include Cython/Debugger/Tests *
                     include runtests.py
                     ''')
    sys.path.insert(0, cy3_dir)
    # Some files must stay Python-2 style: overwrite the converted copies.
    for keep_2x_file in KEEP_2X_FILES:
        destfile = os.path.join(cy3_dir, keep_2x_file)
        shutil.copy(keep_2x_file, destfile)
def getversion_svn_setuptools(path=None):
    """Get version info for a Subversion checkout using setuptools.

    @param path: directory of the Subversion checkout
    @return:
        - tag (name for the repository),
        - rev (current Subversion revision identifier),
        - date (date of current revision),
        - hash (git hash for the Subversion revision)
    @rtype: C{tuple} of three C{str} and a C{time.struct_time}
    """
    # svn_utils holds the import-time exception if loading failed;
    # re-raise it lazily on first use.
    if isinstance(svn_utils, Exception):
        raise svn_utils
    tag = 'pywikibot-core'
    _program_dir = path or _get_program_dir()
    svninfo = svn_utils.SvnInfo(_program_dir)
    # suppress warning
    old_level = log.set_threshold(log.ERROR)
    rev = svninfo.get_revision()
    log.set_threshold(old_level)
    # Validate the revision before formatting it as "s<rev>".
    if not isinstance(rev, int):
        raise TypeError('SvnInfo.get_revision() returned type %s'
                        % type(rev))
    if rev < 0:
        raise ValueError('SvnInfo.get_revision() returned %d' % rev)
    if rev == 0:
        raise ParseError('SvnInfo: invalid workarea')
    hsh, date = github_svn_rev2hash(tag, rev)
    rev = 's%s' % rev
    return (tag, rev, date, hsh)
def generate_version_py(packagename, version, release=None, debug=None):
    """Regenerate the version.py module if necessary.

    Rewrites <packagename>/version.py when the recorded version, release
    flag, or debug flag differs from the requested values.
    """
    from .setup_helpers import is_distutils_display_option
    from .utils.compat.misc import invalidate_caches
    from distutils import log
    import imp
    import os
    import sys
    try:
        version_module = __import__(packagename + '.version', fromlist=[
            '_last_generated_version', 'version', 'release', 'debug'])
        try:
            last_generated_version = version_module._last_generated_version
        except AttributeError:
            # Older version.py with no _last_generated_version; this will
            # ensure a new version.py is written
            last_generated_version = None
        current_release = version_module.release
        current_debug = version_module.debug
    except ImportError:
        # No version.py yet: force generation.
        version_module = None
        last_generated_version = None
        current_release = None
        current_debug = None
    if release is None:
        # Keep whatever the current value is, if it exists
        release = bool(current_release)
    if debug is None:
        # Likewise, keep whatever the current value is, if it exists
        debug = bool(current_debug)
    version_py = os.path.join(packagename, 'version.py')
    if (last_generated_version != version or current_release != release or
            current_debug != debug):
        if '-q' not in sys.argv and '--quiet' not in sys.argv:
            log.set_threshold(log.INFO)
        if is_distutils_display_option():
            # Always silence unnecessary log messages when display options
            # are being used
            log.set_threshold(log.WARN)
        log.info('Freezing version number to {0}'.format(version_py))
        with open(version_py, 'w') as f:
            # This overwrites the actual version.py
            f.write(_get_version_py_str(packagename, version, release,
                                        debug))
        invalidate_caches()
        if version_module:
            imp.reload(version_module)
def __init__(self, debug=False):
    """Prepare a customized C compiler and record Python's library dirs.

    :param debug: when true, make distutils logging verbose (DEBUG).
    """
    # Pick the distutils verbosity before doing any compiler work.
    threshold = log.DEBUG if debug else log.INFO
    log.set_threshold(threshold)
    compiler = new_compiler()
    customize_compiler(compiler)
    self._compiler = compiler
    # A finalized build_ext command knows the interpreter's library dirs.
    builder = build_ext(Distribution())
    builder.finalize_options()
    self._build_ext = builder
    self._py_lib_dirs = builder.library_dirs
def _convert_metadata(zf, destination_eggdir, dist_info, egg_info):
    """Convert a wheel's .dist-info metadata into .egg-info format.

    Extracts the wheel archive *zf* into *destination_eggdir*, validates the
    wheel format version, and rewrites its *dist_info* directory as
    *egg_info* (PKG-INFO + requires.txt).
    """
    def get_metadata(name):
        # Parse a metadata file from inside the wheel as an email message.
        with zf.open(posixpath.join(dist_info, name)) as fp:
            value = fp.read().decode('utf-8')
            return email.parser.Parser().parsestr(value)
    wheel_metadata = get_metadata('WHEEL')
    # Check wheel format version is supported.
    wheel_version = parse_version(wheel_metadata.get('Wheel-Version'))
    wheel_v1 = (
        parse_version('1.0') <= wheel_version < parse_version('2.0dev0'))
    if not wheel_v1:
        raise ValueError(
            'unsupported wheel format version: %s' % wheel_version)
    # Extract to target directory.
    os.mkdir(destination_eggdir)
    zf.extractall(destination_eggdir)
    # Convert metadata.
    dist_info = os.path.join(destination_eggdir, dist_info)
    dist = pkg_resources.Distribution.from_location(
        destination_eggdir, dist_info,
        metadata=pkg_resources.PathMetadata(destination_eggdir, dist_info),
    )
    # Note: Evaluate and strip markers now,
    # as it's difficult to convert back from the syntax:
    # foobar; "linux" in sys_platform and extra == 'test'
    def raw_req(req):
        req.marker = None
        return str(req)
    install_requires = list(map(raw_req, dist.requires()))
    extras_require = {
        extra: [
            req
            for req in map(raw_req, dist.requires((extra, )))
            if req not in install_requires
        ]
        for extra in dist.extras
    }
    # Rename the metadata directory and files to egg-info conventions.
    os.rename(dist_info, egg_info)
    os.rename(
        os.path.join(egg_info, 'METADATA'),
        os.path.join(egg_info, 'PKG-INFO'),
    )
    setup_dist = setuptools.Distribution(attrs=dict(
        install_requires=install_requires,
        extras_require=extras_require,
    ), )
    # Temporarily disable info traces.
    log_threshold = log._global_log.threshold
    log.set_threshold(log.WARN)
    try:
        write_requirements(
            setup_dist.get_command_obj('egg_info'),
            None,
            os.path.join(egg_info, 'requires.txt'),
        )
    finally:
        log.set_threshold(log_threshold)
def run(self):
    """Run the 2to3-converting build, then convert the test suite."""
    print("build_py_2to3")
    build_py_2to3.run(self)
    print("copy/convert test suite")
    # Quiet distutils while copying; restore the previous threshold after.
    previous_threshold = log.set_threshold(log.ERROR)
    copydir_run_2to3('test', 'test3', template=self.manifest_in)
    log.set_threshold(previous_threshold)
def finalize_options(self):
    """Finalize the options.

    Delegates to both parent commands, inherits dist_dir from "bdist",
    and raises logging verbosity when --debug was given.
    """
    bdist.finalize_options(self)
    BaseCommand.finalize_options(self)
    self.set_undefined_options("bdist", ("dist_dir", "dist_dir"))
    if self.debug:
        self.log.setLevel(logging.DEBUG)
        log.set_threshold(1)  # Set Distutils logging level to DEBUG
def is_openmp_supported():
    """
    Determine whether the build compiler has OpenMP support.

    :return: the boolean result of check_openmp_support().
    """
    # Silence distutils output during the probe compile; restore the
    # previous threshold even if the probe raises (the original leaked
    # the FATAL threshold on exception).
    log_threshold = log.set_threshold(log.FATAL)
    try:
        return check_openmp_support()
    finally:
        log.set_threshold(log_threshold)
def tearDown(self):
    """Removes the patch: restore HOME and the distutils log threshold."""
    previous_home = self._old_home
    if previous_home is not None:
        os.environ['HOME'] = previous_home
    else:
        # HOME did not exist before the patch; remove it again.
        del os.environ['HOME']
    set_threshold(self.old_threshold)
    super(PyPIRCCommandTestCase, self).tearDown()
def run(self):
    """Run the 2to3 build, then copy/convert the auxiliary directories."""
    build_py_2to3.run(self)
    print("copying aux dirs")
    # Quiet distutils for the copies; restore the old threshold after.
    previous_threshold = log.set_threshold(log.ERROR)
    for source in ['tools', 'test']:
        destination = os.path.join(self.build_lib, source)
        copydir_run_2to3(source, destination, template=self.manifest_in)
    log.set_threshold(previous_threshold)
def tearDown(self):
    """Undo the monkey-patches installed by setUp (independent restores)."""
    log.set_threshold(self.__threshold)
    sys.stdout = self.__stdout
    sys.stderr = self.__stderr
    __builtin__.__import__ = self.__import
    super(TestCoverage, self).tearDown()
def tearDown(self):
    """Undo the monkey-patches installed by setUp (independent restores)."""
    log.set_threshold(self.__threshold)
    sys.stdout = self.__stdout
    unittest.main = self.__unittest_main
    super(TestTest, self).tearDown()
def _build_extension_module(self, buildable):
    """ Build an extension module from the sources.

    Configures a distutils build_ext command for *buildable*, compiles the
    module inside the buildable's build directory, and registers the built
    file as an installable.

    Fix: the working directory is now restored in a finally block; the
    original left the process chdir'd into the build dir when compilation
    failed and UserException was raised.
    """
    project = self.project
    set_threshold(INFO if project.verbose else ERROR)
    distribution = Distribution()
    module_builder = ExtensionCommand(distribution, buildable)
    module_builder.build_lib = buildable.build_dir
    module_builder.debug = buildable.debug
    module_builder.ensure_finalized()
    # Convert the #defines.
    define_macros = []
    for macro in buildable.define_macros:
        parts = macro.split('=', maxsplit=1)
        name = parts[0]
        try:
            value = parts[1]
        except IndexError:
            # Bare "NAME" with no "=VALUE" part.
            value = None
        define_macros.append((name, value))
    buildable.make_names_relative()
    module_builder.extensions = [
        Extension(buildable.fq_name, buildable.sources,
                  define_macros=define_macros,
                  include_dirs=buildable.include_dirs,
                  libraries=buildable.libraries,
                  library_dirs=buildable.library_dirs)]
    project.progress(
        "Compiling the '{0}' module".format(buildable.fq_name))
    saved_cwd = os.getcwd()
    os.chdir(buildable.build_dir)
    try:
        try:
            module_builder.run()
        except Exception as e:
            raise UserException(
                "Unable to compile the '{0}' module".format(
                    buildable.fq_name),
                detail=str(e))
        # Add the extension module to the buildable's list of installables.
        installable = Installable(
            'module', target_subdir=buildable.get_install_subdir())
        installable.files.append(
            module_builder.get_ext_fullpath(buildable.fq_name))
        buildable.installables.append(installable)
    finally:
        # Always restore the working directory, even on failure.
        os.chdir(saved_cwd)
def generate_version_py(packagename, version, release=None, debug=None,
                        uses_git=True, srcdir='.'):
    """Regenerate the version.py module if necessary.

    Rewrites <srcdir>/<packagename>/version.py when the recorded version,
    release flag, or debug flag differs from the requested values.
    """
    try:
        version_module = get_pkg_version_module(packagename)
        try:
            last_generated_version = version_module._last_generated_version
        except AttributeError:
            # Older version.py without the attribute: fall back.
            last_generated_version = version_module.version
        try:
            last_githash = version_module._last_githash
        except AttributeError:
            last_githash = version_module.githash
        current_release = version_module.release
        current_debug = version_module.debug
    except ImportError:
        # No version.py yet: force generation below.
        version_module = None
        last_generated_version = None
        last_githash = None
        current_release = None
        current_debug = None
    if release is None:
        # Keep whatever the current value is, if it exists
        release = bool(current_release)
    if debug is None:
        # Likewise, keep whatever the current value is, if it exists
        debug = bool(current_debug)
    package_srcdir = os.path.join(srcdir, *packagename.split('.'))
    version_py = os.path.join(package_srcdir, 'version.py')
    # NOTE(review): last_githash is read but not part of this comparison,
    # so a githash-only change does not trigger regeneration — confirm
    # that is intentional.
    if (last_generated_version != version or current_release != release or
            current_debug != debug):
        if '-q' not in sys.argv and '--quiet' not in sys.argv:
            log.set_threshold(log.INFO)
        if is_distutils_display_option():
            # Always silence unnecessary log messages when display options
            # are being used
            log.set_threshold(log.WARN)
        log.info('Freezing version number to {0}'.format(version_py))
        with open(version_py, 'w') as f:
            # This overwrites the actual version.py
            f.write(_get_version_py_str(packagename, version, last_githash,
                                        release, debug, uses_git=uses_git))
        invalidate_caches()
        if version_module:
            imp.reload(version_module)
def generate_version_py(packagename, version, release=None, debug=None):
    """Regenerate the version.py module if necessary.

    Rewrites <packagename>/version.py when the recorded version, release
    flag, or debug flag differs from the requested values.
    """
    from .setup_helpers import is_distutils_display_option
    from .utils.compat.misc import invalidate_caches
    from distutils import log
    import imp
    import os
    import sys
    try:
        version_module = __import__(packagename + '.version',
                                    fromlist=['_last_generated_version',
                                              'version', 'release', 'debug'])
        try:
            last_generated_version = version_module._last_generated_version
        except AttributeError:
            # Older version.py with no _last_generated_version; this will
            # ensure a new version.py is written
            last_generated_version = None
        current_release = version_module.release
        current_debug = version_module.debug
    except ImportError:
        # No version.py yet: force generation below.
        version_module = None
        last_generated_version = None
        current_release = None
        current_debug = None
    if release is None:
        # Keep whatever the current value is, if it exists
        release = bool(current_release)
    if debug is None:
        # Likewise, keep whatever the current value is, if it exists
        debug = bool(current_debug)
    version_py = os.path.join(packagename, 'version.py')
    if (last_generated_version != version or current_release != release or
            current_debug != debug):
        if '-q' not in sys.argv and '--quiet' not in sys.argv:
            log.set_threshold(log.INFO)
        if is_distutils_display_option():
            # Always silence unnecessary log messages when display options
            # are being used
            log.set_threshold(log.WARN)
        log.info('Freezing version number to {0}'.format(version_py))
        with open(version_py, 'w') as f:
            # This overwrites the actual version.py
            f.write(_get_version_py_str(packagename, version, release,
                                        debug))
        invalidate_caches()
        if version_module:
            imp.reload(version_module)
def reset_distutils_log(): """ This is a setup/teardown fixture that ensures the log-level of the distutils log is always set to a default of WARN, since different settings could affect tests that check the contents of stdout. """ from distutils import log log.set_threshold(log.WARN)
def tearDown(self):
    """Removes the patch: restore HOME, delete the temporary .pypirc and
    put the distutils log threshold back."""
    previous_home = self._old_home
    if previous_home is not None:
        os.environ['HOME'] = previous_home
    else:
        # HOME did not exist before the patch; remove it again.
        del os.environ['HOME']
    if os.path.exists(self.rc):
        os.remove(self.rc)
    set_threshold(self.old_threshold)
def generate_version_py(packagename, version, release=None, debug=None,
                        uses_git=True):
    """Regenerate the version.py module if necessary.

    Rewrites <packagename>/version.py when the recorded version, release
    flag, or debug flag differs from the requested values.
    """
    try:
        version_module = get_pkg_version_module(packagename)
        try:
            last_generated_version = version_module._last_generated_version
        except AttributeError:
            # Older version.py without the attribute: fall back.
            last_generated_version = version_module.version
        try:
            last_githash = version_module._last_githash
        except AttributeError:
            last_githash = version_module.githash
        current_release = version_module.release
        current_debug = version_module.debug
    except ImportError:
        # No version.py yet: force generation below.
        version_module = None
        last_generated_version = None
        last_githash = None
        current_release = None
        current_debug = None
    if release is None:
        # Keep whatever the current value is, if it exists
        release = bool(current_release)
    if debug is None:
        # Likewise, keep whatever the current value is, if it exists
        debug = bool(current_debug)
    version_py = os.path.join(packagename, 'version.py')
    if (last_generated_version != version or current_release != release or
            current_debug != debug):
        if '-q' not in sys.argv and '--quiet' not in sys.argv:
            log.set_threshold(log.INFO)
        if is_distutils_display_option():
            # Always silence unnecessary log messages when display options
            # are being used
            log.set_threshold(log.WARN)
        log.info('Freezing version number to {0}'.format(version_py))
        with open(version_py, 'w') as f:
            # This overwrites the actual version.py
            f.write(_get_version_py_str(packagename, version, last_githash,
                                        release, debug, uses_git=uses_git))
        invalidate_caches()
        if version_module:
            imp.reload(version_module)
def setUpClass(cls):
    """One-time test fixture: record directories and quiet distutils."""
    cls.data_dir = os.path.join(os.path.dirname(__file__), 't')
    cls.setup_dir = os.path.abspath('.')
    cls.fake_name = 'frobulate'
    cls.original_dir = os.getcwd()
    # Workaround for https://github.com/astropy/astropy-helpers/issues/124
    if hasattr(sandbox, 'hide_setuptools'):
        sandbox.hide_setuptools = lambda: None
    # Silence INFO-level distutils output for all tests in this class.
    log.set_threshold(log.WARN)
def _get_included_files(package_masks):
    """Return the files selected by MANIFEST.in plus the package modules
    matching *package_masks*.

    Fix: the distutils log threshold is now restored in a finally block;
    the original leaked the ERROR threshold if reading or processing
    MANIFEST.in raised.
    """
    old_threshold = set_threshold(ERROR)
    try:
        file_list = FileList()
        file_list.extend(_iter_package_modules(package_masks))
        manifest = TextFile('MANIFEST.in', strip_comments=1, skip_blanks=1,
                            join_lines=1, lstrip_ws=1, rstrip_ws=1,
                            collapse_join=1)
        # Apply each MANIFEST.in template command (include, graft, ...).
        for line in manifest.readlines():
            file_list.process_template_line(line)
    finally:
        set_threshold(old_threshold)
    return file_list.files
def run(self):
    """Quiet distutils, build the static assets (unless a light build was
    requested), then run the normal build command."""
    from distutils import log as distutils_log
    distutils_log.set_threshold(distutils_log.WARN)
    if not IS_LIGHT_BUILD:
        for command_name in ("build_integration_docs",
                             "build_assets",
                             "build_js_sdk_registry"):
            self.run_command(command_name)
    BuildCommand.run(self)
def run(self):
    """Build the .py files, then copy package data files, preserving the
    executable bit.

    Fix: the distutils threshold is restored in a finally block; the
    original left it at ERROR if build_py.run() raised.
    """
    old = log.set_threshold(log.ERROR)  # Avoid "__init__.py not found" warning
    try:
        # Copy .py files and build, as usual
        build_py.run(self)
    finally:
        log.set_threshold(old)
    # Copy data files
    for data_file in self.package_data_files:
        outfile = os.path.join(self.build_lib, data_file)
        self.copy_file(data_file, outfile, preserve_mode=0)
        # Re-apply the executable bit if the source file had one.
        executable = (os.stat(data_file).st_mode & 0o111) != 0
        if executable:
            os.chmod(outfile, os.stat(outfile).st_mode | 0o111)
def run(self):
    """Build the .py files, then copy package data files, preserving the
    executable bit.

    Fix: the distutils threshold is restored in a finally block; the
    original left it at ERROR if build_py.run() raised.
    """
    old = log.set_threshold(
        log.ERROR)  # Avoid "__init__.py not found" warning
    try:
        # Copy .py files and build, as usual
        build_py.run(self)
    finally:
        log.set_threshold(old)
    # Copy data files
    for data_file in self.package_data_files:
        outfile = os.path.join(self.build_lib, data_file)
        self.copy_file(data_file, outfile, preserve_mode=0)
        # Re-apply the executable bit if the source file had one.
        executable = (os.stat(data_file).st_mode & 0o111) != 0
        if executable:
            os.chmod(outfile, os.stat(outfile).st_mode | 0o111)
def distribution_hide_listing(distribution): """Given a ``distribution``, this context manager temporarily sets distutils threshold to WARN if ``--hide-listing`` argument was provided. It yields True if ``--hide-listing`` argument was provided. """ # pylint:disable=protected-access old_threshold = distutils_log._global_log.threshold hide_listing = False if (hasattr(distribution, "hide_listing") and distribution.hide_listing): hide_listing = True distutils_log.set_threshold(distutils_log.WARN) yield hide_listing distutils_log.set_threshold(old_threshold)
def finalize_options(self):
    """Finalize the options.

    Overrides the distribution's get_fullname so generated archives are
    named "<name>[-<label>]-<version><edition>".
    """
    def _get_fullname():
        # Compose the custom archive base name.
        return "{name}{label}-{version}{edition}".format(
            name=self.distribution.get_name(),
            label="-{}".format(self.label) if self.label else "",
            version=self.distribution.get_version(),
            edition=self.edition or "")
    self.distribution.get_fullname = _get_fullname
    sdist.finalize_options(self)
    if self.debug:
        self.log.setLevel(logging.DEBUG)
        log.set_threshold(1)  # Set Distutils logging level to DEBUG
def main(args=None):
    """Install the NPM test requirements, then run setuptools setup().

    :param args: optional explicit command-line arguments; when given they
        replace sys.argv-based argument parsing.
    """
    # HACK: ensure NPM requirements are present for all commands.
    log.set_threshold(log.INFO)
    npm_install('./nose_mocha', ['mocha'])
    npm_install('.', ['should'])
    settings = dict(
        name='nose-mocha',
        version='0.0.1',
        description='Integrates the Node.js testing framework Mocha with nosetests',
        long_description=open('README.rst').read(),
        author='Evan Jones',
        author_email='*****@*****.**',
        url='http://github.com/ejones/nose-mocha',
        keywords='test nose nosetest automatic discovery mocha nodejs js javascript',
        classifiers=[
            'Development Status :: 3 - Alpha',
            'Intended Audience :: Developers',
            'License :: OSI Approved :: MIT License',
            'Natural Language :: English',
            'Operating System :: OS Independent',
            'Programming Language :: Python',
            'Topic :: Software Development :: Testing'
        ],
        license='MIT',
        packages=['nose_mocha'],
        include_package_data=True,
        # Ship everything under node_modules as package data, with paths
        # made relative to the package directory.
        package_data={
            'nose_mocha': [
                os.path.join(dirpath, f)[len('nose_mocha/'):]
                for dirpath, dirnames, filenames
                in os.walk('nose_mocha/node_modules')
                for f in filenames],
        },
        zip_safe=False,
        setup_requires=['nose', 'coverage'],
        test_suite='nose.collector',
        entry_points="""\
[nose.plugins.0.10]
mocha = nose_mocha:Mocha
""",
    )
    if args:
        settings['script_name'] = __file__
        settings['script_args'] = args
    setuptools.setup(**settings)
def setUpClass(cls):
    """One-time test fixture: record state and quiet distutils."""
    cls.fake_name = 'frobulate'
    cls.original_dir = os.getcwd()
    # Workaround for https://github.com/astropy/astropy-helpers/issues/124
    if hasattr(sandbox, 'hide_setuptools'):
        sandbox.hide_setuptools = lambda: None
    # Keep the previous threshold so a teardown can restore it.
    cls.old_threshold = log.set_threshold(log.WARN)
def jt_dircopy_exe(fromDirectory, toDirectory):
    """Recursively copy *fromDirectory* into *toDirectory*, logging each
    copied file; errors are reported on stdout instead of raised.

    Fix: the original printed the return values of set_verbosity()/
    set_threshold() (None and the previous threshold) — debug noise with
    no informational value.
    """
    try:
        # Make distutils verbose so copy_tree reports every file it copies.
        log.set_verbosity(log.INFO)
        log.set_threshold(log.INFO)
        copy_tree(fromDirectory, toDirectory)
    except Exception as Error:
        # Best-effort helper: report and swallow, matching original intent.
        print("Error: " + str(Error))
def deploy_files(git_repo, files_to_deploy): ''' This function will recursively copy the files from "source" to "destination". The argument it receives is a list of source-destination pair(s) in the form of: [ { 'source': '/path/to/source0', 'destination': '/path/to/destination0', }, { 'source': '/path/to/source1', 'destination': '/path/to/destination1', } ] ''' # Check if the copying should be verbose or not. if logging.getLevelName(log.getEffectiveLevel()) == "INFO": verbose_copying = 1 # The following lines are to specify that the created directories and # copied files are to be printed to stdout and/or stderr. dirs_log.set_verbosity(dirs_log.INFO) dirs_log.set_threshold(dirs_log.INFO) else: verbose_copying = 0 try: # If the destination path does not exist, it will be automatically # created. If preserve_mode is true (the default), the file's mode # (type and permission bits, or whatever is analogous on the current # platform) is copied. If preserve_times is true (the default), the # last-modified and last-access times are copied as well. If update is # true, src will only be copied if dst does not exist, or if dst does # exist but is older than src. (Per our discussions, it will be False). dir_util.copy_tree( os.path.join(git_repo, files_to_deploy["source"]), files_to_deploy["destination"], preserve_mode = 1, preserve_times = 1, preserve_symlinks = 0, update = 0, verbose = verbose_copying ) except Exception as exception: log.error("Unable to deploy files. Exception: {0}".format(exception)) sys.exit(1)
def finalize_options(self):
    """Finalize the options.

    Copies this command's build options verbatim onto the "build_ext"
    command object (no environment-variable fallback in this variant).
    """
    if self.debug:
        self.log.setLevel(logging.DEBUG)
        log.set_threshold(1)  # Set Distutils logging level to DEBUG
    cmd_build_ext = self.distribution.get_command_obj("build_ext")
    cmd_build_ext.with_mysql_capi = self.with_mysql_capi
    cmd_build_ext.with_openssl_include_dir = self.with_openssl_include_dir
    cmd_build_ext.with_openssl_lib_dir = self.with_openssl_lib_dir
    cmd_build_ext.with_protobuf_include_dir = \
        self.with_protobuf_include_dir
    cmd_build_ext.with_protobuf_lib_dir = self.with_protobuf_lib_dir
    cmd_build_ext.with_protoc = self.with_protoc
    cmd_build_ext.extra_compile_args = self.extra_compile_args
    cmd_build_ext.extra_link_args = self.extra_link_args
    self._copy_vendor_libraries()
def setUp(self):
    """Silence distutils logging and capture log records for assertions."""
    super().setUp()
    # Keep the previous threshold so tearDown can restore it.
    self.threshold = log.set_threshold(log.FATAL)
    # catching warnings
    # when log will be replaced by logging
    # we won't need such monkey-patch anymore
    self._old_log = log.Log._log
    log.Log._log = self._log
    # Records appended by the self._log replacement.
    self.logs = []
def setUp(self):
    """Silence distutils logging and capture log records for assertions."""
    super(LoggingSilencer, self).setUp()
    # Keep the previous threshold so tearDown can restore it.
    self.threshold = log.set_threshold(log.FATAL)
    # catching warnings
    # when log will be replaced by logging
    # we won't need such monkey-patch anymore
    self._old_log = log.Log._log
    log.Log._log = self._log
    # Records appended by the self._log replacement.
    self.logs = []
def refactor_for_py3(distdir, cy3_dir):
    """Convert the Cython sources in *distdir* to Python 3 with 2to3,
    writing the result into *cy3_dir* and prepending it to sys.path."""
    # need to convert Cython sources first
    import lib2to3.refactor
    from distutils.util import copydir_run_2to3
    # Apply every lib2to3 fixer except "next" (handled elsewhere).
    fixers = [fix for fix in
              lib2to3.refactor.get_fixers_from_package("lib2to3.fixes")
              if fix.split('fix_')[-1] not in ('next',)]
    if not os.path.exists(cy3_dir):
        os.makedirs(cy3_dir)
    import distutils.log as dlog
    dlog.set_threshold(dlog.INFO)
    # The MANIFEST-style template selects which files get copied/converted.
    copydir_run_2to3(distdir, cy3_dir, fixer_names=fixers,
                     template = '''
                     global-exclude *
                     graft Cython
                     recursive-exclude Cython *
                     recursive-include Cython *.py *.pyx *.pxd
                     ''')
    sys.path.insert(0, cy3_dir)
def run(self):
    """Configure the editline extension (local libedit or the bundled
    build), then run the common build.

    Fix: the distutils log threshold is restored in a finally block; the
    original leaked the WARN threshold if configuration raised.
    """
    # locate the extension
    cext = self.get_editline_extension()
    # increase the log level to quiet the spew
    oldlog = log.set_threshold(log.WARN)
    try:
        # check for the locally installed library
        found = self.check_local_libedit(cext)
        # setup the internal build if necessary
        if not found:
            self.configure_builtin_libedit(cext)
    finally:
        # restore
        log.set_threshold(oldlog)
    # now run the common build
    super().run()
def setUp(self):
    """Raise distutils verbosity and capture stdout for assertions."""
    super(TestTest, self).setUp()
    # set_threshold returns the previous value; keep it for tearDown.
    self.__threshold = log.set_threshold(log.INFO)
    self.__stdout = sys.stdout
    self.stdout = StringIO.StringIO()
    sys.stdout = self.stdout
    # Save unittest.main so the test can stub it and restore it later.
    self.__unittest_main = unittest.main
def _generate_version_py(version, release):
    """Regenerate the version.py module if necessary.

    Rewrites astropy/version.py when the importable version differs from
    the requested *version*.
    """
    import os
    import sys
    try:
        from astropy.version import version as current_version
    except ImportError:
        # No version.py yet (fresh checkout): force generation.
        current_version = None
    version_py = os.path.join('astropy', 'version.py')
    if current_version != version:
        if '-q' not in sys.argv and '--quiet' not in sys.argv:
            log.set_threshold(log.INFO)
        log.info('Freezing version number to {0}'.format(version_py))
        with open(version_py, 'w') as f:
            # This overwrites the actual version.py
            f.write(_get_version_py_str(version, release))
def copyFilesAccross_noshutil(source, destination, upd, file):
    """Recursively copy *source* to *destination* via distutils.dir_util,
    logging progress and duration to *file*.

    Deprecated: use copyFilesAcross_withShutil instead.

    Fix: the OSError handler concatenated the exception object itself
    ('... + e'), which raises TypeError inside the handler; it now uses
    str(e) like the DistutilsFileError handler.
    """
    # function is deprecated . use copyFilesAcross_withShutil
    lg.set_verbosity(lg.INFO)
    lg.set_threshold(lg.INFO)
    # check if there is enough space
    startTime = datetime.datetime.now()
    log(0, "copyFilesAccross: Copying files ...", file)
    try:
        dir_util.copy_tree(source, destination, update=upd, verbose=1)
        log(
            0, "copyFilesAccross: Operation has completed successfully in: "
            + str(datetime.datetime.now() - startTime), file)
    except OSError as e:
        log(
            1, "copyFileAccross: Failed to copy from " + source + " to " +
            destination + " with error: " + str(e), file)
    except dir_util.DistutilsFileError as e:
        log(
            1, "copyFileAccross: Failed to copy from " + source + " to " +
            destination + " with error: " + str(e), file)
def test_non_ascii(self):
    # Non-ASCII text written through the ASCII streams must come out
    # escaped with the backslashreplace handler.
    old_stdout = sys.stdout
    old_stderr = sys.stderr
    # Lower the threshold so log.debug() emits; restored in finally.
    old_threshold = log.set_threshold(log.DEBUG)
    try:
        with NamedTemporaryFile(mode='w+', encoding='ascii'
                                ) as stdout, NamedTemporaryFile(mode='w+',
                                                                encoding='ascii'
                                                                ) as stderr:
            sys.stdout = stdout
            sys.stderr = stderr
            log.debug('debug:é')
            log.fatal('fatal:é')
            stdout.seek(0)
            self.assertEqual(stdout.read().rstrip(), 'debug:\\xe9')
            stderr.seek(0)
            self.assertEqual(stderr.read().rstrip(), 'fatal:\\xe9')
    finally:
        log.set_threshold(old_threshold)
        sys.stdout = old_stdout
        sys.stderr = old_stderr
def test_non_ascii(self):
    # Issue #8663: test that non-ASCII text is escaped with
    # backslashreplace error handler (stream use ASCII encoding and strict
    # error handler)
    #
    # Fix: the original set the threshold to DEBUG inside the try block and
    # never restored it, leaking the DEBUG level into later tests; the old
    # value is now saved and restored in finally.
    old_stdout = sys.stdout
    old_stderr = sys.stderr
    old_threshold = log.set_threshold(log.DEBUG)
    try:
        with NamedTemporaryFile(mode="w+", encoding='ascii') as stdout, \
             NamedTemporaryFile(mode="w+", encoding='ascii') as stderr:
            sys.stdout = stdout
            sys.stderr = stderr
            log.debug("debug:\xe9")
            log.fatal("fatal:\xe9")
            stdout.seek(0)
            self.assertEqual(stdout.read().rstrip(), "debug:\\xe9")
            stderr.seek(0)
            self.assertEqual(stderr.read().rstrip(), "fatal:\\xe9")
    finally:
        log.set_threshold(old_threshold)
        sys.stdout = old_stdout
        sys.stderr = old_stderr
def generate_version_py(packagename, version, release, debug=None):
    """Regenerate the version.py module if necessary.

    Rewrites <packagename>/version.py when the recorded version, release
    flag, or debug flag differs from the requested values.
    """
    from distutils import log
    import imp
    import os
    import sys
    try:
        version_module = __import__(packagename + '.version',
                                    fromlist=['version', 'release', 'debug'])
        current_version = version_module.version
        current_release = version_module.release
        current_debug = version_module.debug
    except ImportError:
        # No version.py yet: force generation below.
        version_module = None
        current_version = None
        current_release = None
        current_debug = None
    if debug is None:
        # Keep whatever the current value is, if it exists
        debug = bool(current_debug)
    version_py = os.path.join(packagename, 'version.py')
    if (current_version != version or current_release != release or
            current_debug != debug):
        if '-q' not in sys.argv and '--quiet' not in sys.argv:
            log.set_threshold(log.INFO)
        log.info('Freezing version number to {0}'.format(version_py))
        with open(version_py, 'w') as f:
            # This overwrites the actual version.py
            f.write(_get_version_py_str(packagename, version, release,
                                        debug))
        if version_module:
            imp.reload(version_module)
def setUp(self):
    """Raise distutils verbosity and capture stdio plus __import__."""
    super(TestCoverage, self).setUp()
    # set_threshold returns the previous value; keep it for tearDown.
    self.__threshold = log.set_threshold(log.INFO)
    self.__stdout = sys.stdout
    self.stdout = StringIO.StringIO()
    sys.stdout = self.stdout
    self.__stderr = sys.stderr
    self.stderr = StringIO.StringIO()
    sys.stderr = self.stderr
    # Save the builtin import hook so the test can stub and restore it.
    self.__import = __builtin__.__import__
def setUp(self):
    """Patches the environment.

    Points HOME at a fresh temp dir (so a test .pypirc can be written),
    builds a minimal PyPIRCCommand subclass, and quiets distutils logging.
    """
    super(PyPIRCCommandTestCase, self).setUp()
    self.tmp_dir = self.mkdtemp()
    os.environ['HOME'] = self.tmp_dir
    self.rc = os.path.join(self.tmp_dir, '.pypirc')
    self.dist = Distribution()

    class command(PyPIRCCommand):
        def __init__(self, dist):
            PyPIRCCommand.__init__(self, dist)
        def initialize_options(self):
            pass
        finalize_options = initialize_options

    self._cmd = command
    # Previous threshold is kept so tearDown can restore it.
    self.old_threshold = set_threshold(WARN)
def setUp(self):
    """Patches the environment.

    Points HOME at this test's directory (so a test .pypirc can be
    written), builds a minimal PyPIRCCommand subclass, and quiets
    distutils logging.
    """
    # NOTE(review): dict.has_key() exists only on Python 2; this module
    # appears to target Python 2 (see __builtin__/StringIO elsewhere).
    if os.environ.has_key('HOME'):
        self._old_home = os.environ['HOME']
    else:
        self._old_home = None
    curdir = os.path.dirname(__file__)
    os.environ['HOME'] = curdir
    self.rc = os.path.join(curdir, '.pypirc')
    self.dist = Distribution()

    class command(PyPIRCCommand):
        def __init__(self, dist):
            PyPIRCCommand.__init__(self, dist)
        def initialize_options(self):
            pass
        finalize_options = initialize_options

    self._cmd = command
    # Previous threshold is kept so tearDown can restore it.
    self.old_threshold = set_threshold(WARN)