Beispiel #1
0
def TestPlatform():
    """Print assorted interpreter and operating-system details.

    Diagnostic helper: writes one line per :mod:`platform` query to stdout
    and returns ``None``. Fixed to use the ``print()`` function so the code
    runs on both Python 2 and Python 3 (the original used Python-2-only
    ``print`` statements, a syntax error on Python 3).
    """
    print("----------Operation System--------------------------")
    # Python version string, e.g. '3.10.4'
    print('取得 python 版本 : ' + platform.python_version())

    # Executable architecture, e.g. ('64bit', 'WindowsPE')
    print("取得操作系統可執行結構 : ex('64bit','WindowsPE')")
    print(platform.architecture())

    # Network (host) name of this machine
    print('電腦目前網路群組名稱' + platform.node())

    # OS name and version, e.g. 'Windows-7-6.1.7601-SP1'
    print('獲取操作系統名稱及版本號 : ' + platform.platform())

    # Processor description, e.g. 'Intel64 Family 6 Model 42 Stepping 7, GenuineIntel'
    print('電腦處理器資訊 : ' + platform.processor())

    # Build number and date of this Python interpreter
    print("獲取操作系統中 Python 的構建日期")
    print(platform.python_build())

    # Compiler used to build this interpreter
    print('獲取系統中 python 解釋器的信息 : ' + platform.python_compiler())

    if platform.python_branch() == "":
        print(platform.python_implementation())
        print(platform.python_revision())
    print("platform.release : " + platform.release())
    print("platform.system : " + platform.system())

    #print(platform.system_alias())
    # OS release version
    print('獲取操作系統版本 : ' + platform.version())
Beispiel #2
0
def check_python_version(self, impl=None, version=(), cmp_=operator.ge):
    """Check for Python version. Very loosely based upon
    :meth:`waflib.Tools.python.check_python_version()`, but simplified and
    designed to check the currently running interpreter.

    :param impl: Python implementation as returned by
        :func:`platform.python_implementation()`
    :type impl: :class:`str`
    :param version: Partial or full version tuple as returned by
        :func:`platform.python_version_tuple()`
    :type version: :class:`tuple` of :class:`str`
    :param cmp_: Comparison operator for the version
    :type cmp_: :class:`func`
    """
    # Each criterion is skipped when its argument is left at the default.
    implementation_ok = impl is None or platform.python_implementation() == impl
    version_ok = version == () or cmp_(platform.python_version_tuple(), version)
    meets_requirements = implementation_ok and version_ok

    wanted_version = '.'.join(version)
    separator = ' ' if version else ''
    required_name = (impl or 'Python') + separator + wanted_version
    found_name = '{0} {1}'.format(
        platform.python_implementation(), platform.python_version())

    self.msg('Checking for ' + required_name,
             found_name if meets_requirements else False)
    if not meets_requirements:
        self.fatal('Expecting {0}, found {1}'.format(
            required_name, found_name))

    # Set the PYTHON configuration variable if successful. It is set as a list
    # following the lead of find_program() in Waf 1.8.
    self.env.PYTHON = [sys.executable]
def _get_file_from_object(obj):
    if platform.python_implementation() == 'Jython':
        return obj.__file__.split("$py.class")[0] + ".py"
    if sys.version_info > (3, 0) or platform.python_implementation() == 'PyPy':
        return obj.__file__
    else:
        return obj.__file__[:-1]
Beispiel #4
0
def detect_platform():
    """Return ``{'implementation': ..., 'version': ...}`` for this interpreter.

    Recognizes Google App Engine via the SERVER_SOFTWARE environment
    variable; otherwise reports the implementation name from :mod:`platform`.
    """
    server_software = os.environ.get("SERVER_SOFTWARE", "")
    if server_software.startswith(("Google App Engine/", "Development/")):
        implementation = "Google App Engine"
    else:
        implementation = platform.python_implementation()

    if implementation == "CPython":
        implementation_version = platform.python_version()
    elif implementation == "PyPy":
        pypy = sys.pypy_version_info
        implementation_version = "%s.%s.%s" % (pypy.major, pypy.minor, pypy.micro)
        if pypy.releaselevel != "final":
            # e.g. append 'beta0' for pre-release PyPy builds.
            implementation_version += pypy.releaselevel
    elif implementation in ("Jython", "IronPython"):
        implementation_version = platform.python_version()  # Complete Guess
    elif implementation == "Google App Engine":
        # SERVER_SOFTWARE looks like 'Google App Engine/<version>'.
        implementation_version = os.environ["SERVER_SOFTWARE"].split("/")[1]
    else:
        implementation_version = "Unknown"

    return {"implementation": implementation, "version": implementation_version}
Beispiel #5
0
def get_python_version():
    """Return a short interpreter label, e.g. ``'Python 3.10  (64bit)'``."""
    impl = platform.python_implementation()
    # CPython is reported as plain 'Python'; other implementations keep their name.
    name = "Python" if impl == "CPython" else impl
    major, minor = sys.version_info[0], sys.version_info[1]
    bits = platform.architecture()[0]
    return "{0} {1}.{2}  ({3})".format(name, major, minor, bits)
Beispiel #6
0
def initialize_platform():
    """Check the python_implementation and the python_version.
       Update the global variables __is_iron_python__ and __is_unicode__.
    """
    global __is_iron_python__, __is_unicode__
    implementation = platform.python_implementation()
    if implementation == "CPython":
        # Compare the major version numerically: the original string compare
        # ('10' >= '3' is False) would misclassify a hypothetical Python 10.
        if int(platform.python_version_tuple()[0]) >= 3:
            __is_unicode__ = True
    elif implementation == "IronPython":
        __is_iron_python__ = True
        __is_unicode__ = True
Beispiel #7
0
def get_python_version():
    """Return the Python version number as a string."""
    impl = platform.python_implementation()
    # CPython is labelled plain 'Python'; other implementations keep their name.
    name = "Python" if impl == "CPython" else impl
    ver = "{0}.{1}.{2}".format(*sys.version_info[:3])
    return "{0} {1}  ({2})".format(name, ver, platform.architecture()[0])
Beispiel #8
0
def python_version_string():
    """We use it to generate per python folder name, where
    we will install all packages.
    """
    impl = platform.python_implementation()
    # PyPy's own release version distinguishes installs better than the
    # Python language version it implements.
    info = sys.pypy_version_info if impl == 'PyPy' else sys.version_info
    version = '{v.major}.{v.minor}.{v.micro}'.format(v=info)
    build_name, _ = platform.python_build()
    sanitized_build = build_name.replace(':', '_')  # windows do not understand `:` in path
    return '{}-{}-{}'.format(impl, version, sanitized_build)
Beispiel #9
0
def get_system_info():
    """Return a one-line human-readable OS + Python description.

    Returns ``None`` when ``os.name`` is neither ``'nt'`` nor ``'posix'``.
    """
    pyver = platform.python_version()
    impl = platform.python_implementation()
    if os.name == 'nt':
        # (renamed the last field from `type`, which shadowed the builtin)
        ver, build, sp, os_type = platform.win32_ver()
        arch = platform.machine()
        return 'Windows {ver} {sp}, NT {build} on {arch} Python {pyver} ({impl}).'.format(
            ver=ver, sp=sp, build=build, arch=arch, pyver=pyver, impl=impl)
    elif os.name == 'posix':
        # platform.linux_distribution() was deprecated in 3.5 and removed in
        # 3.8; fall back to empty fields so the formatted string stays valid.
        try:
            distro = platform.linux_distribution()
        except AttributeError:
            distro = ('', '', '')
        distro = '{0} {2} {1}'.format(*distro)
        return '{name} ({distro}) {sysver}, Python {pyver} ({impl})'.format(
            name=platform.system(),
            distro=distro,
            sysver=platform.release(),
            pyver=pyver,
            impl=impl)
Beispiel #10
0
def test_travis_python_environment():
    """Assert the running interpreter matches the TRAVIS_PYTHON_VERSION env var."""
    requested = environ[TRAVIS_PYTHON_VERSION].lower()

    if requested in ('pypy', 'pypy3'):
        # Travis' 'pypy' is PyPy 2.7; 'pypy3' is PyPy 3.2.
        assert platform.python_implementation() == PYPY
        expected_version = (2, 7) if requested == 'pypy' else (3, 2)
    else:
        expected_version = tuple(int(part) for part in requested.split('.'))

    from sys import version_info
    assert expected_version == version_info[:2]
	def get(self):
		"""Handle GET: respond with a JSON (or JSONP) report of interpreter,
		platform and environment details.

		NOTE(review): webapp-style handler (self.request / self.response),
		presumably Google App Engine — confirm against the enclosing class.
		Python 2 only: reads ``sys.maxint``, which does not exist on Python 3.
		"""
		retVal = {}
		retVal["success"] = True
		retVal["message"] = "OK"
		# Build metadata comes from the environment; fall back to "dev" locally.
		retVal["commit"] = os.environ["COMMIT"] if "COMMIT" in os.environ else "dev"
		retVal["timestamp"] = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
		retVal["lastmod"] = os.environ["LASTMOD"] if "LASTMOD" in os.environ else "dev"
		retVal["tech"] = "Python %d.%d.%d" % (sys.version_info.major, sys.version_info.minor, sys.version_info.micro)
		retVal["version"] = "%s (%s)" % (platform.python_version(), platform.python_implementation())
		# add_if_exists (defined elsewhere) presumably skips values that raise
		# or are unavailable on this runtime — TODO confirm.
		add_if_exists(retVal, "platform.machine()", platform.machine())
		add_if_exists(retVal, "platform.node()", platform.node())
		#IOError: add_if_exists(retVal, "platform.platform()", platform.platform())
		add_if_exists(retVal, "platform.processor()", platform.processor())
		add_if_exists(retVal, "platform.python_branch()", platform.python_branch())
		add_if_exists(retVal, "platform.python_build()", platform.python_build())
		add_if_exists(retVal, "platform.python_compiler()", platform.python_compiler())
		add_if_exists(retVal, "platform.python_implementation()", platform.python_implementation())
		add_if_exists(retVal, "platform.python_version()", platform.python_version())
		add_if_exists(retVal, "platform.python_revision()", platform.python_revision())
		add_if_exists(retVal, "platform.release()", platform.release())
		add_if_exists(retVal, "platform.system()", platform.system())
		add_if_exists(retVal, "platform.version()", platform.version())
		add_if_exists(retVal, "platform.uname()", platform.uname())
		add_if_exists(retVal, "sysconfig.get_platform()", sysconfig.get_platform())
		add_if_exists(retVal, "sysconfig.get_python_version()", sysconfig.get_python_version())
		add_if_exists(retVal, "sys.byteorder", sys.byteorder)
		add_if_exists(retVal, "sys.copyright", sys.copyright)
		add_if_exists(retVal, "sys.getdefaultencoding()", sys.getdefaultencoding())
		add_if_exists(retVal, "sys.getfilesystemencoding()", sys.getfilesystemencoding())
		add_if_exists(retVal, "sys.maxint", sys.maxint)
		add_if_exists(retVal, "sys.maxsize", sys.maxsize)
		add_if_exists(retVal, "sys.maxunicode", sys.maxunicode)
		add_if_exists(retVal, "sys.version", sys.version)

		self.response.headers['Content-Type'] = 'text/plain'

		# JSONP support: if a syntactically valid 'callback' query parameter is
		# present, wrap the JSON in callback(...); otherwise emit plain JSON
		# with permissive CORS headers.
		callback = self.request.get('callback')
		if len(callback) == 0 or re.match("[a-zA-Z][-a-zA-Z0-9_]*$", callback) is None:
			self.response.headers['Access-Control-Allow-Origin'] = '*'
			self.response.headers['Access-Control-Allow-Methods'] = 'POST, GET'
			self.response.headers['Access-Control-Max-Age'] = '604800' # 1 week
			self.response.out.write(json.dumps(retVal, separators=(',', ':')))
		else:
			self.response.out.write(callback)
			self.response.out.write("(")
			self.response.out.write(json.dumps(retVal, separators=(',', ':')))
			self.response.out.write(");")
Beispiel #12
0
def default_user_agent(name="python-requests"):
    """Return a string representing the default user agent."""
    implementation = platform.python_implementation()

    if implementation == 'CPython':
        implementation_version = platform.python_version()
    elif implementation == 'PyPy':
        pypy = sys.pypy_version_info
        implementation_version = '%s.%s.%s' % (pypy.major, pypy.minor, pypy.micro)
        if pypy.releaselevel != 'final':
            # Append e.g. 'beta0' for pre-release PyPy builds.
            implementation_version += pypy.releaselevel
    elif implementation in ('Jython', 'IronPython'):
        implementation_version = platform.python_version()  # Complete Guess
    else:
        implementation_version = 'Unknown'

    try:
        system = platform.system()
        release = platform.release()
    except IOError:
        system = 'Unknown'
        release = 'Unknown'

    segments = ['%s/%s' % (name, __version__),
                '%s/%s' % (implementation, implementation_version),
                '%s/%s' % (system, release)]
    return " ".join(segments)
Beispiel #13
0
    def test_6_cleanup(self):
        """
        verify that when an SSHClient is collected, its transport (and the
        transport's packetizer) is closed.
        """
        # Unclear why this is borked on Py3, but it is, and does not seem worth
        # pursuing at the moment. Skipped on PyPy because it fails on travis
        # for unknown reasons, works fine locally.
        # XXX: It's the release of the references to e.g packetizer that fails
        # in py3...
        if not PY2 or platform.python_implementation() == "PyPy":
            return
        # Start the server side of the connection in a background thread.
        threading.Thread(target=self._run).start()

        self.tc = paramiko.SSHClient()
        self.tc.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.assertEqual(0, len(self.tc.get_host_keys()))
        self.tc.connect(**dict(self.connect_kwargs, password='******'))

        # Wait (up to 1s) for the server thread to signal the handshake is done.
        self.event.wait(1.0)
        self.assertTrue(self.event.is_set())
        self.assertTrue(self.ts.is_active())

        # Hold only a weak reference so the packetizer can actually be freed
        # once the client is closed and dereferenced below.
        p = weakref.ref(self.tc._transport.packetizer)
        self.assertTrue(p() is not None)
        self.tc.close()
        del self.tc

        # force a collection to see whether the SSHClient object is deallocated
        # correctly. 2 GCs are needed to make sure it's really collected on
        # PyPy
        gc.collect()
        gc.collect()

        self.assertTrue(p() is None)
Beispiel #14
0
    def test_linux(self):
        """Verify the shared library's ELF dependencies (via ``objdump -p``)
        are limited to a small allow-list of system libraries.

        NOTE(review): ``ffi.lib._name`` is presumably the path of the loaded
        shared library — confirm against the ``ffi`` module.
        """
        lib_path = ffi.lib._name
        env = os.environ.copy()
        env['LANG'] = 'C'  # force untranslated objdump output for parsing
        p = subprocess.Popen(["objdump", "-p", lib_path],
                             stdout=subprocess.PIPE, env=env)
        out, _ = p.communicate()
        self.assertEqual(0, p.returncode)
        # Parse library dependencies
        # Matches e.g. 'libz.so', 'libc.so.6', capturing the bare library name.
        lib_pat = re.compile(r'^([-_a-zA-Z0-9]+)\.so(?:\.\d+){0,3}$')
        deps = set()
        for line in out.decode().splitlines():
            parts = line.split()
            if parts and parts[0] == 'NEEDED':
                dep = parts[1]
                m = lib_pat.match(dep)
                if len(parts) != 2 or not m:
                    self.fail("invalid NEEDED line: %r" % (line,))
                deps.add(m.group(1))
        # Sanity check that our dependencies were parsed ok
        if 'libc' not in deps or 'libpthread' not in deps:
            self.fail("failed parsing dependencies? got %r" % (deps,))
        # Ensure all dependencies are expected
        allowed = set(['librt', 'libdl', 'libpthread', 'libz', 'libm',
                       'libgcc_s', 'libc', 'ld-linux', 'ld64'])
        if platform.python_implementation() == 'PyPy':
            # PyPy builds additionally link against libtinfo.
            allowed.add('libtinfo')

        for dep in deps:
            # The dynamic loader's name varies by architecture (ld-linux-*).
            if not dep.startswith('ld-linux-') and dep not in allowed:
                self.fail("unexpected dependency %r in %r" % (dep, deps))
Beispiel #15
0
    def user_agent(): # pragma: no cover
        """Returns string representation of user-agent"""
        implementation = platform.python_implementation()

        if implementation == 'CPython':
            version = platform.python_version()
        elif implementation == 'PyPy':
            pypy = sys.pypy_version_info
            version = '%s.%s.%s' % (pypy.major, pypy.minor, pypy.micro)
        elif implementation in ('Jython', 'IronPython'):
            version = platform.python_version()
        else:
            version = 'Unknown'

        try:
            system = platform.system()
            release = platform.release()
        except IOError:
            system = 'Unknown'
            release = 'Unknown'

        segments = [
            'appurify-client/%s' % constants.__version__,
            'python-requests/%s' % requests.__version__,
            '%s/%s' % (implementation, version),
            '%s/%s' % (system, release),
        ]
        return " ".join(segments)
Beispiel #16
0
def python_implementation():
    """Return the interpreter implementation name, lower-cased."""
    try:
        # PEP 421, Python 3.3
        return sys.implementation.name.lower()
    except AttributeError:
        # Older interpreters: fall back to the platform module.
        return platform.python_implementation().lower()
Beispiel #17
0
def process_files():
	"""Format the listed C++ test files with the AStyle shared library.

	Prints the library version and per-file progress; formatted output is
	written back over each input file via save_source_code_bytes().
	"""
	# options are byte code for AStyle input
	option_bytes = b"-A2tOP"
	files = [ "../test-c/ASBeautifier.cpp",
	          "../test-c/ASFormatter.cpp" ,
	          "../test-c/astyle.h" ]

	#initialization
	print("ExampleByte",
			platform.python_implementation(),
			platform.python_version(),
			platform.architecture()[0])
	initialize_platform()
	libc = initialize_library()
	version_bytes = get_astyle_version_bytes(libc)
	print("Artistic Style Version " + version_bytes.decode('utf-8'))
	# process the input files
	for file_path in files:
		bytes_in = get_source_code_bytes(file_path)
		formatted_bytes = format_source_bytes(libc, bytes_in, option_bytes)
		# format_source_bytes() yields None on failure; report and skip the file.
		if type(formatted_bytes) is type(None):
			print("Error in formatting", file_path)
			continue
		print("Formatted", file_path)
		save_source_code_bytes(formatted_bytes, file_path)
Beispiel #18
0
File: misc.py  Project: bket/borg
def sysinfo():
    """Return a multi-line string with platform, Python, Borg, process and
    invocation details, or '' when BORG_SHOW_SYSINFO=no.
    """
    show_sysinfo = os.environ.get('BORG_SHOW_SYSINFO', 'yes').lower()
    if show_sysinfo == 'no':
        return ''

    python_implementation = platform.python_implementation()
    python_version = platform.python_version()
    # platform.uname() does a shell call internally to get processor info,
    # creating #3732 issue, so rather use os.uname().
    try:
        uname = os.uname()
    except AttributeError:
        # os.uname() is unavailable on Windows.
        uname = None
    if sys.platform.startswith('linux'):
        try:
            linux_distribution = platform.linux_distribution()  # noqa
        except Exception:
            # platform.linux_distribution() is deprecated since py 3.5 and removed in 3.7.
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed here.
            linux_distribution = ('Unknown Linux', '', '')
    else:
        linux_distribution = None
    info = []
    if uname is not None:
        info.append('Platform: %s' % (' '.join(uname), ))
    if linux_distribution is not None:
        info.append('Linux: %s %s %s' % linux_distribution)
    info.append('Borg: %s  Python: %s %s' % (borg_version, python_implementation, python_version))
    info.append('PID: %d  CWD: %s' % (os.getpid(), os.getcwd()))
    info.append('sys.argv: %r' % sys.argv)
    info.append('SSH_ORIGINAL_COMMAND: %r' % os.environ.get('SSH_ORIGINAL_COMMAND'))
    info.append('')
    return '\n'.join(info)
Beispiel #19
0
def index():
    """ The main Flask entry-point (/) for the Stallion server.

    Renders system_information.html with interpreter and platform details.
    """
    data = {"breadpath": [Crumb("Main")]}

    data.update(get_shared_data())
    data["menu_home"] = "active"

    sys_info = {
        "Python Platform": sys.platform,
        "Python Version": sys.version,
        "Python Prefix": sys.prefix,
        "Machine Type": platform.machine(),
        "Platform": platform.platform(),
        "Processor": platform.processor(),
    }

    # Best-effort lookup; narrowed from a bare `except:` so that
    # KeyboardInterrupt/SystemExit are no longer swallowed.
    try:
        sys_info["Python Implementation"] = platform.python_implementation()
    except Exception:
        pass

    sys_info["System"] = platform.system()
    sys_info["System Arch"] = platform.architecture()

    data["system_information"] = sys_info

    return render_template("system_information.html", **data)
Beispiel #20
0
 def __init__(self, email=None, token=None, organization_id=False,
              api_root="https://secure.transcriptic.com", organization=False,
              cookie=False, verbose=False, use_environ=True, analytics=True, user_id="default"):
     """Create an API connection object.

     When credentials are omitted and ``use_environ`` is true, they are read
     from the USER_EMAIL / USER_TOKEN / USER_ORGANIZATION environment
     variables. NOTE(review): the ``cookie`` parameter is accepted but not
     used in this method — confirm whether callers rely on it.
     """
     if email is None and use_environ:
         email = os.environ['USER_EMAIL']
         token = os.environ['USER_TOKEN']
         organization_id = os.environ['USER_ORGANIZATION']
     self.api_root = api_root
     self.email = email
     self.token = token
     # Either keyword may carry the organization; organization_id wins.
     self.organization_id = organization_id or organization
     self.verbose = verbose
     self.analytics = analytics
     self.user_id = user_id
     # Default headers for every request; User-Agent describes the runtime.
     self.headers = {
         "X-User-Email": email,
         "X-User-Token": token,
         "Content-Type": "application/json",
         "Accept": "application/json",
         "User-Agent": "txpy/{} ({}/{}; {}/{}; {}; {})".format(__version__,
                                                               platform.python_implementation(),
                                                               platform.python_version(),
                                                               platform.system(),
                                                               platform.release(),
                                                               platform.machine(),
                                                               platform.architecture()[0])
     }
     # Preload known environment arguments
     self.env_args = dict(api_root=self.api_root, org_id=self.organization_id)
     # Register this connection as the module-level active API object.
     transcriptic.api = self
Beispiel #21
0
def runSlice(fileNames):
	"""Run the slicer on the files.

	If running under PyPy, slice directly. Otherwise try to find a pypy
	executable and re-invoke the slicer through it; frozen builds require
	pypy and exit with status 1 when it cannot be found.
	"""
	pypyExe = getPyPyExe()
	for fileName in fileNames:
		# '#UTF8#' marks names that were pre-encoded by the caller.
		# NOTE(review): ``unicode`` exists on Python 2 only — this module is py2.
		if fileName.startswith("#UTF8#"):
			fileName = unicode(fileName[6:], "utf-8")
		if platform.python_implementation() == "PyPy":
			skeinforge_craft.writeOutput(fileName)
		elif pypyExe == False:
			# sys.frozen is set in py2exe/PyInstaller builds.
			if not hasattr(sys, 'frozen'):
				print("************************************************")
				print("* Failed to find pypy, so slicing with python! *")
				print("************************************************")
				skeinforge_craft.writeOutput(fileName)
				print("************************************************")
				print("* Failed to find pypy, so sliced with python!  *")
				print("************************************************")
			else:
				print("******************************************************************")
				print("* Failed to find pypy, we need pypy to slice with a frozen build *")
				print("* Place pypy in the same directory as Cura so Cura can find it.  *")
				print("******************************************************************")
				sys.exit(1)
		else:
			# Delegate the slicing to the external pypy interpreter.
			subprocess.call(getSliceCommand(fileName))
Beispiel #22
0
def show_sidebar_button_info(python_input):
    """
    Create `Layout` for the information in the right-bottom corner.
    (The right part of the status bar.)
    """
    @if_mousedown
    def toggle_sidebar(cli, mouse_event):
        " Click handler for the menu. "
        python_input.show_sidebar = not python_input.show_sidebar

    token = Token.Toolbar.Status

    # Build the static token list once: "[F2] Menu - <impl> <x.y.z> ".
    version = sys.version_info
    tokens = [
        (token.Key, '[F2]', toggle_sidebar),
        (token, ' Menu', toggle_sidebar),
        (token, ' - '),
        (token.PythonVersion, '%s %i.%i.%i' % (platform.python_implementation(),
                                               version[0], version[1], version[2])),
        (token, ' '),
    ]
    # Fixed width so the bar can be right-aligned by the layout.
    width = token_list_width(tokens)

    def get_tokens(cli):
        # Python version
        return tokens

    # Visible only while the status bar is shown and no exit prompt is active.
    return ConditionalContainer(
        content=Window(
            TokenListControl(get_tokens, default_char=Char(token=token)),
            height=LayoutDimension.exact(1),
            width=LayoutDimension.exact(width)),
        filter=~IsDone() & RendererHeightIsKnown() &
            Condition(lambda cli: python_input.show_status_bar and
                                  not python_input.show_exit_confirmation))
Beispiel #23
0
def _cryptography_version():
    # pyca/cryptography dropped support for PyPy < 5.4 in 2.5
    # https://cryptography.io/en/latest/changelog/#v2-5
    if platform.python_implementation() == 'PyPy' and platform.python_version() < '5.4':
        return 'cryptography < 2.5'

    return 'cryptography'
Beispiel #24
0
 def test_large_read_until(self):
     """Regression test: read_until on a ~4MB payload must run in linear time."""
     # Performance test: read_until used to have a quadratic component
     # so a read_until of 4MB would take 8 seconds; now it takes 0.25
     # seconds.
     server, client = self.make_iostream_pair()
     try:
         try:
             # This test fails on pypy with ssl.  I think it's because
             # pypy's gc defeats moves objects, breaking the
             # "frozen write buffer" assumption.
             if (isinstance(server, SSLIOStream) and
                 platform.python_implementation() == 'PyPy'):
                 raise unittest.SkipTest(
                     "pypy gc causes problems with openssl")
         except AttributeError:
             # python 2.5 didn't have platform.python_implementation,
             # but there was no pypy for 2.5
             pass
         # Write 4MB in 1KB chunks, then read it all back with one read_until.
         NUM_KB = 4096
         for i in xrange(NUM_KB):
             client.write(b("A") * 1024)
         client.write(b("\r\n"))
         server.read_until(b("\r\n"), self.stop)
         data = self.wait()
         # Full payload plus the trailing CRLF delimiter.
         self.assertEqual(len(data), NUM_KB * 1024 + 2)
     finally:
         server.close()
         client.close()
Beispiel #25
0
def python_info():
    """Return a dict describing the running Python: display name, combined
    implementation/version string, and the stdlib directory."""
    version = '%s %s' % (platform.python_implementation(),
                         platform.python_version())
    # platform.__file__ lives in the stdlib, so its directory locates it.
    stdlib_dir = os.path.dirname(platform.__file__)
    return {'name': 'Python', 'version': version, 'path': stdlib_dir}
Beispiel #26
0
def run_setup(extensions):
    """Invoke setup() for the cassandra-driver package, optionally building
    the given C ``extensions``.
    """
    kw = {'cmdclass': {'doc': DocCommand, 'gevent_nosetests': gevent_nosetests}}
    if extensions:
        kw['cmdclass']['build_ext'] = build_extensions
        kw['ext_modules'] = extensions

    # 'blist' is a CPython-only C extension; drop it on other interpreters.
    dependencies = ['futures', 'scales', 'blist']
    if platform.python_implementation() != "CPython":
        dependencies.remove('blist')

    setup(
        name='cassandra-driver',
        version=__version__,
        description='Python driver for Cassandra',
        long_description=long_description,
        url='http://github.com/datastax/python-driver',
        author='Tyler Hobbs',
        author_email='*****@*****.**',
        packages=['cassandra', 'cassandra.io'],
        include_package_data=True,
        install_requires=dependencies,
        tests_require=['nose', 'mock', 'ccm', 'unittest2', 'PyYAML', 'pytz'],
        classifiers=[
            'Development Status :: 5 - Production/Stable',
            'Intended Audience :: Developers',
            'License :: OSI Approved :: Apache Software License',
            'Natural Language :: English',
            'Operating System :: OS Independent',
            'Programming Language :: Python',
            'Programming Language :: Python :: 2',
            'Programming Language :: Python :: 2.6',
            'Programming Language :: Python :: 2.7',
            'Topic :: Software Development :: Libraries :: Python Modules'
        ],
        **kw)
Beispiel #27
0
    def test_behave_masking_user_attribute_causes_warning(self):
        """A runner-level attribute that masks a user-set context attribute
        must emit a ContextMaskWarning naming the attribute and this file."""
        warns = []

        def catch_warning(*args, **kwargs):
            # Capture the warning object (first positional arg) for inspection.
            warns.append(args[0])

        # Temporarily intercept warnings instead of letting them print.
        old_showwarning = warnings.showwarning
        warnings.showwarning = catch_warning

        with self.context.use_with_user_mode():
            self.context.thing = "stuff"
        # pylint: disable=protected-access
        self.context._push()
        self.context.thing = "other stuff"

        warnings.showwarning = old_showwarning

        print(repr(warns))
        assert warns, "OOPS: warns is empty, but expected non-empty"
        warning = warns[0]
        assert isinstance(warning, runner.ContextMaskWarning), "warning is not a ContextMaskWarning"
        info = warning.args[0]
        assert info.startswith("behave runner"), "%r doesn't start with 'behave runner'" % info
        assert "'thing'" in info, "%r not in %r" % ("'thing'", info)
        # Jython reports __file__ as 'module$py.class'; normalize before matching.
        filename = __file__.rsplit(".", 1)[0]
        if python_implementation() == "Jython":
            filename = filename.replace("$py", ".py")
        assert filename in info, "%r not in %r" % (filename, info)
Beispiel #28
0
    def test_setting_root_attribute_that_masks_existing_causes_warning(self):
        """Setting a root context attribute that masks an existing user-set
        attribute must emit a ContextMaskWarning naming it and this file."""
        # pylint: disable=protected-access
        warns = []

        def catch_warning(*args, **kwargs):
            # Capture the warning object (first positional arg) for inspection.
            warns.append(args[0])

        # Temporarily intercept warnings instead of letting them print.
        old_showwarning = warnings.showwarning
        warnings.showwarning = catch_warning

        with self.context.use_with_user_mode():
            self.context._push()
            self.context.thing = "teak"
        self.context._set_root_attribute("thing", "oak")

        warnings.showwarning = old_showwarning

        print(repr(warns))
        assert warns
        warning = warns[0]
        assert isinstance(warning, runner.ContextMaskWarning)
        info = warning.args[0]
        assert info.startswith("behave runner"), "%r doesn't start with 'behave runner'" % info
        assert "'thing'" in info, "%r not in %r" % ("'thing'", info)
        # Jython reports __file__ as 'module$py.class'; normalize before matching.
        filename = __file__.rsplit(".", 1)[0]
        if python_implementation() == "Jython":
            filename = filename.replace("$py", ".py")
        assert filename in info, "%r not in %r" % (filename, info)
Beispiel #29
0
    def run_tests(self):
        """Run the selected regression tests, sequentially or (with --use-mp)
        across multiple worker processes, printing a platform banner first."""
        # For a partial run, we do not need to clutter the output.
        if (self.ns.verbose
            or self.ns.header
            or not (self.ns.pgo or self.ns.quiet or self.ns.single
                    or self.tests or self.ns.args)):
            # Print basic platform information
            print("==", platform.python_implementation(), *sys.version.split())
            print("==  ", platform.platform(aliased=True),
                          "%s-endian" % sys.byteorder)
            print("==  ", "hash algorithm:", sys.hash_info.algorithm,
                  "64bit" if sys.maxsize > 2**32 else "32bit")
            print("==  ", os.getcwd())
            print("Testing with flags:", sys.flags)

        if self.ns.randomize:
            print("Using random seed", self.ns.random_seed)

        # --forever: cycle the selected tests endlessly (until a failure);
        # no meaningful total count in that mode.
        if self.ns.forever:
            self.tests = self._test_forever(list(self.selected))
            self.test_count = ''
            self.test_count_width = 3
        else:
            self.tests = iter(self.selected)
            self.test_count = '/{}'.format(len(self.selected))
            self.test_count_width = len(self.test_count) - 1

        if self.ns.use_mp:
            # Import lazily: only needed for the multiprocess path.
            from test.libregrtest.runtest_mp import run_tests_multiprocess
            run_tests_multiprocess(self)
        else:
            self.run_tests_sequential()
Beispiel #30
0
def main():
    """Configure file-based debug logging and record interpreter details.

    Appends to <config_path>/<APP_NAME>.log and logs a startup banner with
    timestamp, Python implementation/version/arch and platform info.
    """
    config_path = utils.AppUtils().config_path
    log_file = os.path.join(config_path, f"{const.APP_NAME}.log")

    # Get the Python architecture
    # sys.maxsize is 2**31 - 1 on 32-bit interpreters, regardless of the OS.
    py_arch = "x64"
    if sys.maxsize < 2 ** 32:
        py_arch = "x86"

    logging.basicConfig(
        level=logging.DEBUG,
        format="%(asctime)s : %(levelname)s : %(message)s",
        filename=log_file,
        filemode="a"  # append, so restarts keep prior log history
    )

    logging.debug(f"Begin logging to {log_file}")
    # NOTE(review): assumes `from datetime import datetime` at module level — confirm.
    logging.debug(f"Timestamp: {datetime.utcnow().isoformat()}")
    logging.debug("You are running {0} {1} {2} on {3} {4}.".format(
        platform.python_implementation(),
        py_arch,
        platform.python_version(),
        platform.machine(),
        platform.platform())
    )
        with self.assertRaises(errors.RemoteConnectionError) as err_info:
            with ftp_errors(mem_fs):
                raise EOFError
        self.assertEqual(str(err_info.exception),
                         "lost connection to ftp.example.com")

        with self.assertRaises(errors.RemoteConnectionError) as err_info:
            with ftp_errors(mem_fs):
                raise socket.error
        self.assertEqual(str(err_info.exception),
                         "unable to connect to ftp.example.com")


@mark.slow
@unittest.skipIf(platform.python_implementation() == "PyPy",
                 "ftp unreliable with PyPy")
class TestFTPFS(FSTestCases, unittest.TestCase):

    user = "******"
    pasw = "1234"

    @classmethod
    def setUpClass(cls):
        from pyftpdlib.test import ThreadedTestFTPd

        super(TestFTPFS, cls).setUpClass()

        cls._temp_dir = tempfile.mkdtemp("ftpfs2tests")
        cls._temp_path = os.path.join(cls._temp_dir, text_type(uuid.uuid4()))
        os.mkdir(cls._temp_path)
import warnings
# cElementTree is the C-accelerated XML parser on Python 2; fall back to the
# pure-Python implementation where it is unavailable.
try:
    import xml.etree.cElementTree as ElementTree
except ImportError:
    import xml.etree.ElementTree as ElementTree
from unicodedata import east_asian_width

from . import six

# Interpreter version flags, combining six's major-version booleans with the
# running interpreter's minor version.
PY26 = six.PY2 and sys.version_info[1] == 6
PY27 = six.PY2 and sys.version_info[1] == 7
LESS_PY26 = six.PY2 and sys.version_info[1] < 6
LESS_PY32 = six.PY3 and sys.version_info[1] < 2
LESS_PY33 = six.PY3 and sys.version_info[1] < 3
LESS_PY34 = six.PY3 and sys.version_info[1] < 4
# True when running under the PyPy interpreter.
PYPY = platform.python_implementation().lower() == 'pypy'

# Seek origins, mirroring the constants in the io/os modules.
SEEK_SET = 0
SEEK_CUR = 1
SEEK_END = 2

# Definition of East Asian Width
# http://unicode.org/reports/tr11/
# Ambiguous width can be changed by option
_EAW_MAP = {'Na': 1, 'N': 1, 'W': 2, 'F': 2, 'H': 1}

import decimal
# Types treated as decimal values.
DECIMAL_TYPES = [decimal.Decimal, ]

import json
Beispiel #33
0
# Conditional skip decorators shared by the integration tests. Each one
# encodes a requirement on the test environment: cluster location, native
# protocol version, Cassandra server version, or interpreter/platform.
local = unittest.skipUnless(CASSANDRA_IP.startswith("127.0.0."), 'Tests only runs against local C*')
notprotocolv1 = unittest.skipUnless(PROTOCOL_VERSION > 1, 'Protocol v1 not supported')
lessthenprotocolv4 = unittest.skipUnless(PROTOCOL_VERSION < 4, 'Protocol versions 4 or greater not supported')
greaterthanprotocolv3 = unittest.skipUnless(PROTOCOL_VERSION >= 4, 'Protocol versions less than 4 are not supported')
protocolv5 = unittest.skipUnless(5 in get_supported_protocol_versions(), 'Protocol versions less than 5 are not supported')

greaterthancass20 = unittest.skipUnless(CASSANDRA_VERSION >= Version('2.1'), 'Cassandra version 2.1 or greater required')
greaterthancass21 = unittest.skipUnless(CASSANDRA_VERSION >= Version('2.2'), 'Cassandra version 2.2 or greater required')
greaterthanorequalcass30 = unittest.skipUnless(CASSANDRA_VERSION >= Version('3.0'), 'Cassandra version 3.0 or greater required')
greaterthanorequalcass36 = unittest.skipUnless(CASSANDRA_VERSION >= Version('3.6'), 'Cassandra version 3.6 or greater required')
greaterthanorequalcass3_10 = unittest.skipUnless(CASSANDRA_VERSION >= Version('3.10'), 'Cassandra version 3.10 or greater required')
# Fixed skip reason: the check requires 3.11 but the message said "3.10".
greaterthanorequalcass3_11 = unittest.skipUnless(CASSANDRA_VERSION >= Version('3.11'), 'Cassandra version 3.11 or greater required')
greaterthanorequalcass40 = unittest.skipUnless(CASSANDRA_VERSION >= Version('4.0'), 'Cassandra version 4.0 or greater required')
# NOTE(review): this message describes the inverse of the skip condition (the
# decorator skips when the server IS 4.0+); kept as-is to avoid guessing intent.
lessthanorequalcass40 = unittest.skipIf(CASSANDRA_VERSION >= Version('4.0'), 'Cassandra version 4.0 or greater required')
lessthancass30 = unittest.skipUnless(CASSANDRA_VERSION < Version('3.0'), 'Cassandra version less than 3.0 required')
pypy = unittest.skipUnless(platform.python_implementation() == "PyPy", "Test is skipped unless it's on PyPy")
notpy3 = unittest.skipIf(sys.version_info >= (3, 0), "Test not applicable for Python 3.x runtime")
requiresmallclockgranularity = unittest.skipIf("Windows" in platform.system() or "asyncore" in EVENT_LOOP_MANAGER,
                                               "This test is not suitible for environments with large clock granularity")
requiressimulacron = unittest.skipIf(SIMULACRON_JAR is None or CASSANDRA_VERSION < Version("2.1"), "Simulacron jar hasn't been specified or C* version is 2.0")


def wait_for_node_socket(node, timeout):
    """Wait until *node*'s binary interface accepts connections, logging the outcome."""
    itf = node.network_interfaces['binary']
    listening = common.check_socket_listening(itf, timeout=timeout)
    if listening:
        log.debug("Node %s is up and listening " % (node.name,))
    else:
        log.warning("Unable to connect to binary socket for node " + node.name)


def check_socket_listening(itf, timeout=60):
Beispiel #34
0
import os
import platform
import sys

# Operating systems.
WINDOWS = sys.platform == "win32"
LINUX = sys.platform.startswith("linux")

# Python versions. We amend version_info with one more value, a zero if an
# official version, or 1 if built from source beyond an official version.
# (platform.python_version() ends with "+" for such source builds.)
PYVERSION = sys.version_info + (int(platform.python_version()[-1] == "+"), )
PY2 = PYVERSION < (3, 0)
PY3 = PYVERSION >= (3, 0)

# Python implementations.
PYPY = (platform.python_implementation() == 'PyPy')
if PYPY:
    # PyPy exposes its own version tuple alongside the emulated CPython one.
    PYPYVERSION = sys.pypy_version_info

PYPY2 = PYPY and PY2
PYPY3 = PYPY and PY3

JYTHON = (platform.python_implementation() == 'Jython')
IRONPYTHON = (platform.python_implementation() == 'IronPython')


# Python behavior
class PYBEHAVIOR(object):
    """Flags indicating this Python's behavior."""

    # Is "if __debug__" optimized away?
Beispiel #35
0
def version():
    """Return a string with various version information."""
    output = ["qutebrowser v{}".format(qutebrowser.__version__)]

    gitver = _git_str()
    if gitver is not None:
        output.append("Git commit: {}".format(gitver))

    output.append("Backend: {}".format(_backend()))

    # Interpreter and Qt toolchain versions.
    output.extend([
        '',
        '{}: {}'.format(platform.python_implementation(),
                        platform.python_version()),
        'Qt: {}'.format(earlyinit.qt_version()),
        'PyQt: {}'.format(PYQT_VERSION_STR),
        '',
    ])

    output.extend(_module_versions())

    output.extend([
        'pdf.js: {}'.format(_pdfjs_version()),
        'sqlite: {}'.format(sql.version()),
        'QtNetwork SSL: {}\n'.format(QSslSocket.sslLibraryVersionString()
                                     if QSslSocket.supportsSsl() else 'no'),
    ])

    qapp = QApplication.instance()
    if qapp:
        output.append('Style: {}'.format(
            qapp.style().metaObject().className()))

    importpath = os.path.dirname(os.path.abspath(qutebrowser.__file__))

    output.append('Platform: {}, {}'.format(platform.platform(),
                                            platform.architecture()[0]))

    dist = distribution()
    if dist is not None:
        output.append(
            'Linux distribution: {} ({})'.format(dist.pretty, dist.parsed.name))

    output.extend([
        'Frozen: {}'.format(hasattr(sys, 'frozen')),
        "Imported from {}".format(importpath),
        "Using Python from {}".format(sys.executable),
        "Qt library executable path: {}, data path: {}".format(
            QLibraryInfo.location(QLibraryInfo.LibraryExecutablesPath),
            QLibraryInfo.location(QLibraryInfo.DataPath)
        ),
    ])

    # Only probe /etc style OS info when the distribution is unknown.
    if not dist or dist.parsed == Distribution.unknown:
        output.extend(_os_info())

    output.extend(['', 'Paths:'])
    for name, path in sorted(_path_info().items()):
        output.append('{}: {}'.format(name, path))

    output.extend(['', 'Uptime: {}'.format(_uptime())])

    return '\n'.join(output)
Beispiel #36
0
    Rational,
    strip_context,
)
from mathics.core.rules import Pattern, Rule

from mathics.builtin.lists import (
    python_levelspec,
    walk_levels,
    InvalidLevelspecError,
    List,
)
from mathics.builtin.functional import Identity

import platform

# pympler's asizeof walks CPython object internals to measure recursive
# object sizes, so byte counting is disabled when running under PyPy.
if platform.python_implementation() == "PyPy":
    bytecount_support = False
else:
    from .pympler.asizeof import asizeof as count_bytes

    bytecount_support = True


class Sort(Builtin):
    """
    <dl>
    <dt>'Sort[$list$]'
    <dd>sorts $list$ (or the leaves of any other expression) according to canonical ordering.
    <dt>'Sort[$list$, $p$]'
    <dd>sorts using $p$ to determine the order of two elements.
    </dl>
        ('architecture', _arch),
        # Linux kernel version, OSX version, etc.
        ('version', _ver)
    ])
else:
    # Get potential alias (e.g. SunOS 5.11 becomes Solaris 2.11)
    _aliased = platform.system_alias(
        platform.system(), platform.release(), platform.version())
    _METADATA['os'] = SON([
        ('type', platform.system()),
        ('name', ' '.join([part for part in _aliased[:2] if part])),
        ('architecture', platform.machine()),
        ('version', _aliased[2])
    ])

# Record the Python runtime in the driver metadata. PyPy and Jython report
# both their own version and the CPython language version they implement.
if platform.python_implementation().startswith('PyPy'):
    _METADATA['platform'] = ' '.join(
        (platform.python_implementation(),
         '.'.join(imap(str, sys.pypy_version_info)),
         '(Python %s)' % '.'.join(imap(str, sys.version_info))))
elif sys.platform.startswith('java'):
    # Jython: sys.platform is 'javaX.Y', so include the real OS name/release.
    _METADATA['platform'] = ' '.join(
        (platform.python_implementation(),
         '.'.join(imap(str, sys.version_info)),
         '(%s)' % ' '.join((platform.system(), platform.release()))))
else:
    # CPython (or any other implementation): name plus version is enough.
    _METADATA['platform'] = ' '.join(
        (platform.python_implementation(),
         '.'.join(imap(str, sys.version_info))))

utf_16_csv_rel = DocutilsTestSupport.utils.relative_path(None, utf_16_csv)
empty_txt = os.path.join(mydir, 'empty.txt')

# Capture the exception unichr raises for an out-of-range code point, so the
# exact interpreter-specific error text can be embedded in the test data.
unichr_exception = DocutilsTestSupport.exception_data(unichr,
                                                      int("9999999999999",
                                                          16))[0]
if isinstance(unichr_exception, OverflowError):
    unichr_exception_string = 'code too large (%s)' % unichr_exception
else:
    unichr_exception_string = str(unichr_exception)

# some error messages changed in Python 3.3:
# CPython has backported to 2.7.4, PyPy has not
# platform.python_implementation is new in 2.6
csv_eod_error_str = 'unexpected end of data'
if sys.version_info < (2, 7, 4) or platform.python_implementation() == 'PyPy':
    csv_eod_error_str = 'newline inside string'
# pypy adds a line number
if sys.version_info > (2, 6) and platform.python_implementation() == 'PyPy':
    csv_eod_error_str = 'line 1: ' + csv_eod_error_str
# csv quotes the unknown file name in its error message from 3.3.2 onward.
csv_unknown_url = "'bogus.csv'"
if sys.version_info < (3, 3, 2):
    csv_unknown_url = "bogus.csv"


def null_bytes():
    import csv
    csv_data = open(utf_16_csv, 'rb').read()
    csv_data = unicode(csv_data, 'latin1').splitlines()
    reader = csv.reader(
        [tables.CSVTable.encode_for_csv(line + '\n') for line in csv_data])
Beispiel #39
0
except ImportError:
    from ordereddict import OrderedDict  # type: ignore
    from counter import Counter  # type: ignore

# collections.abc is a separate module on Python 3.3+; fall back to the
# collections module itself on older interpreters.
try:
    from collections import abc
except ImportError:
    import collections as abc  # type: ignore

# Typing imports are for static analysis only; the "if False" guard keeps
# them from executing (and possibly failing) at runtime.
if False:
    from typing import Type, Tuple  # noqa


# Interpreter version / implementation flags.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PYPY = platform.python_implementation() == 'PyPy'
CAN_UNPACK_BYTE_ARRAY = sys.version_info[:3] >= (2, 7, 4)
CAN_PACK_HALF_FLOAT = sys.version_info[:2] >= (3, 6)

WINDOWS = platform.system() == 'Windows'

# Fail fast on interpreters older than the supported minimum.
if sys.version_info[:2] <= (2, 6):
    raise ImportError(
        'Hypothesis is not supported on Python versions before 2.7'
    )


def bit_length(n):
    """Return the number of bits needed to represent *n* in binary."""
    length = n.bit_length()
    return length

Beispiel #40
0
from __future__ import print_function

from compas.cad.rhino.forms import Form

# .NET / Rhino imports: these names are only resolvable when running under
# IronPython inside Rhino.
try:
    import scriptcontext as sc
    import System
    from System.Drawing import Size
    from System.Drawing import Point
    from System.Drawing import Color
    from System.Windows.Forms import TextBox
    from System.Windows.Forms import TrackBar

except ImportError:
    import platform
    # Under IronPython a failed import is a real error and is re-raised;
    # on other implementations (e.g. CPython, presumably for docs/tooling
    # imports) the failure is deliberately swallowed.
    if platform.python_implementation() == 'IronPython':
        raise

__author__ = [
    'Tom Van Mele',
]
__copyright__ = 'Copyright 2014, BLOCK Research Group - ETH Zurich'
__license__ = 'MIT License'
__email__ = '*****@*****.**'

__all__ = [
    'SliderForm',
]


class SliderForm(Form):
Beispiel #41
0
"""
Run formatter tests
"""

import platform
import sys

from pprint import pformat

import pytest

from py3status.composite import Composite
from py3status.formatter import Formatter
from py3status.py3 import NoneColor

is_pypy = platform.python_implementation() == 'PyPy'
f = Formatter()

python2 = sys.version_info < (3, 0)

param_dict = {
    'name': u'Björk',
    'number': 42,
    'pi': 3.14159265359,
    'yes': True,
    'no': False,
    'empty': '',
    'None': None,
    '?bad name': 'evil',
    u'☂ Very bad name ': u'☂ extremely evil',
    'long_str': 'I am a long string though not too long',
Beispiel #42
0
def user_agent():
    """
    Return a string representing the user agent.
    """
    impl_name = platform.python_implementation()
    data = {
        "installer": {
            "name": "pip",
            "version": pip.__version__
        },
        "python": platform.python_version(),
        "implementation": {
            "name": impl_name,
        },
    }

    # Implementation-specific interpreter version.
    if impl_name == 'CPython':
        data["implementation"]["version"] = platform.python_version()
    elif impl_name == 'PyPy':
        pypy_version_info = sys.pypy_version_info
        if pypy_version_info.releaselevel == 'final':
            pypy_version_info = pypy_version_info[:3]
        data["implementation"]["version"] = ".".join(
            str(x) for x in pypy_version_info)
    elif impl_name in ('Jython', 'IronPython'):
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro
        # Keep only the fields that are actually populated.
        distro_infos = {
            key: value
            for key, value in zip(["name", "version", "id"],
                                  distro.linux_distribution())
            if value
        }
        libc = {
            key: value
            for key, value in zip(["lib", "version"], libc_ver())
            if value
        }
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    system_name = platform.system()
    if system_name:
        data.setdefault("system", {})["name"] = system_name

    system_release = platform.release()
    if system_release:
        data.setdefault("system", {})["release"] = system_release

    machine = platform.machine()
    if machine:
        data["cpu"] = machine

    # Python 2.6 doesn't have ssl.OPENSSL_VERSION.
    if HAS_TLS and sys.version_info[:2] > (2, 6):
        data["openssl_version"] = ssl.OPENSSL_VERSION

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
Beispiel #43
0
def cli_main(argv):
    """Command-line entry point (Python 2 / PyObjC).

    Routes logging to syslog/stdout, builds an argparse interface from the
    CLI controller's verbs, enforces the root-user policy, logs version
    information, and dispatches to the selected subcommand. Returns an
    os.EX_* exit code on error paths, otherwise the subcommand's result.
    """
    # Log to syslog and stdout only; no controller UI, no file (yet).
    IEDLog.IEDLogToController = False
    IEDLog.IEDLogToSyslog = True
    IEDLog.IEDLogToStdOut = True
    IEDLog.IEDLogToFile = False

    from IEDCLIController import IEDCLIController
    clicontroller = IEDCLIController.alloc().init()

    try:
        # Initialize user defaults before application starts.
        defaults = NSUserDefaults.standardUserDefaults()
        defaultsPath = NSBundle.mainBundle().pathForResource_ofType_(
            u"Defaults", u"plist")
        defaultsDict = NSDictionary.dictionaryWithContentsOfFile_(defaultsPath)
        defaults.registerDefaults_(defaultsDict)

        p = argparse.ArgumentParser()
        p.add_argument(u"-v",
                       u"--verbose",
                       action=u"store_true",
                       help=u"Verbose output")
        p.add_argument(u"-L",
                       u"--log-level",
                       type=int,
                       choices=range(0, 8),
                       default=6,
                       metavar=u"LEVEL",
                       help=u"Log level (0-7), default 6")
        p.add_argument(u"-l", u"--logfile", help=u"Log to file")
        p.add_argument(u"-r",
                       u"--root",
                       action=u"store_true",
                       help=u"Allow running as root")
        sp = p.add_subparsers(title=u"subcommands", dest=u"subcommand")

        # Populate subparser for each verb.
        for verb in clicontroller.listVerbs():
            verb_method = getattr(clicontroller, u"cmd%s_" % verb.capitalize())
            addargs_method = getattr(clicontroller,
                                     u"addargs%s_" % verb.capitalize())
            parser = sp.add_parser(verb, help=verb_method.__doc__)
            addargs_method(parser)
            parser.set_defaults(func=verb_method)

        args = p.parse_args(argv)

        if args.verbose:
            IEDLog.IEDLogStdOutLogLevel = IEDLog.IEDLogLevelInfo
        else:
            IEDLog.IEDLogStdOutLogLevel = IEDLog.IEDLogLevelNotice

        IEDLog.IEDLogFileLogLevel = args.log_level

        if args.logfile:
            if args.logfile == u"-":
                # Redirect log to stdout instead.
                IEDLog.IEDLogFileHandle = sys.stdout
                IEDLog.IEDLogToStdOut = False
            else:
                try:
                    # Line-buffered append so log lines appear promptly.
                    IEDLog.IEDLogFileHandle = open(args.logfile,
                                                   u"a",
                                                   buffering=1)
                except OSError as e:
                    print >> sys.stderr, (u"Couldn't open %s for writing" %
                                          (args.logfile)).encode(u"utf-8")
                    return os.EX_CANTCREAT
            IEDLog.IEDLogToFile = True

        # Check if we're running with root.
        if os.getuid() == 0:
            if args.root:
                fm = NSFileManager.defaultManager()
                url, error = fm.URLForDirectory_inDomain_appropriateForURL_create_error_(
                    NSApplicationSupportDirectory, NSUserDomainMask, None,
                    False, None)
                LogWarning(u"Running as root, using %@",
                           os.path.join(url.path(), u"AutoDMG"))
            else:
                # Root use requires the explicit -r flag.
                LogError(
                    u"Running as root isn't recommended (use -r to override)")
                return os.EX_USAGE

        # Log version info on startup.
        version, build = IEDUtil.getAppVersion()
        LogInfo(u"AutoDMG v%@ build %@", version, build)
        name, version, build = IEDUtil.readSystemVersion_(u"/")
        LogInfo(u"%@ %@ %@", name, version, build)
        LogInfo(u"%@ %@ (%@)", platform.python_implementation(),
                platform.python_version(), platform.python_compiler())
        LogInfo(u"PyObjC %@", objc.__version__)

        return args.func(args)
    finally:
        # Always release controller resources, even on early returns.
        clicontroller.cleanup()
Beispiel #44
0
import sys

# Select the Python-2 or Python-3 flavour of the utility helpers.
if sys.version_info[0] < 3:
    from .utils_py2 import *
else:
    from .utils_py3 import *
#
# configure max recursion
#sys.setrecursionlimit(200)

# platform may be unavailable on stripped-down interpreters (presumably
# embedded builds — TODO confirm); fall back to an 'Unknown' marker.
try:
    import platform
except ImportError:
    python_implementation = 'Unknown'
else:
    python_implementation = platform.python_implementation()
    # can be 'CPython' or 'PyPy'

#------------------------------------------------------------------------------#
# library wide logging function
#------------------------------------------------------------------------------#


def log(msg):
    """Library-wide logging function: print *msg* to standard output."""
    text = msg
    print(text)


#------------------------------------------------------------------------------#
# additional bit list / str functions
#------------------------------------------------------------------------------#
 def __user_agent(self):
     """Build the User-Agent string describing library, Python and platform."""
     py_version = "{0}.{1}".format(sys.version_info.major,
                                   sys.version_info.minor)
     return 'twitter-ads version: {0} platform: Python {1} ({2}/{3})'.format(
         get_version(), py_version, platform.python_implementation(),
         sys.platform)
Beispiel #46
0
    def make_vm(
        self,
        nodes,
        thunks,
        input_storage,
        output_storage,
        storage_map,
        post_thunk_clear,
        computed,
        compute_map,
        updated_vars,
    ):
        """Build and return the virtual machine that evaluates the graph.

        Selects between the Python ``Stack`` VM (needed for callbacks,
        memory profiling and partial evaluation), the C ``CVM`` (when
        ``use_cloop`` is set), and the simple ``Loop``/``LoopGC`` VMs
        (non-lazy graphs without the C loop).
        """

        # Storage cleared before each call so no stale values are recycled.
        pre_call_clear = [storage_map[v] for v in self.no_recycling]

        if (
            self.callback is not None
            or self.callback_input is not None
            or ((config.profile or config.print_global_stats) and config.profile_memory)
            or (self.allow_partial_eval and not self.use_cloop)
        ):

            # These features are only supported by the Python Stack VM, so
            # warn when the requested configuration had to be overridden.
            if self.use_cloop and (
                self.callback is not None or self.callback_input is not None
            ):
                logger.warning("CVM does not support callback, using Stack VM.")
            if self.use_cloop and config.profile_memory:
                warnings.warn("CVM does not support memory profile, using Stack VM.")
            if not self.use_cloop and self.allow_partial_eval:
                warnings.warn(
                    "LoopGC does not support partial evaluation, " "using Stack VM."
                )
            # Needed for allow_gc=True, profiling and storage_map reuse
            deps = self.compute_gc_dependencies(storage_map)
            vm = Stack(
                nodes,
                thunks,
                pre_call_clear,
                storage_map,
                compute_map,
                self.fgraph,
                self.allow_gc,
                len(updated_vars),
                dependencies=deps,
                callback=self.callback,
                callback_input=self.callback_input,
            )
        elif self.use_cloop:
            # The C VM works on integer indices, so translate every node and
            # variable into a dense int-based representation first.
            # create a map from nodes to ints and vars to ints
            nodes_idx = {}
            vars_idx = {}
            for i, node in enumerate(nodes):
                nodes_idx[node] = i
                for v in node.inputs + node.outputs:
                    vars_idx.setdefault(v, len(vars_idx))
            for v in self.fgraph.inputs + self.fgraph.outputs:
                vars_idx.setdefault(v, len(vars_idx))

            nodes_idx_inv = {}
            vars_idx_inv = {}
            for (node, i) in nodes_idx.items():
                nodes_idx_inv[i] = node
            for (var, i) in vars_idx.items():
                vars_idx_inv[i] = var

            # put storage_map and compute_map into a int-based scheme
            storage_map_list = [
                storage_map[vars_idx_inv[i]] for i in range(len(vars_idx_inv))
            ]
            compute_map_list = [
                compute_map[vars_idx_inv[i]] for i in range(len(vars_idx_inv))
            ]
            if nodes:
                assert type(storage_map_list[0]) is list
                assert type(compute_map_list[0]) is list

            # Needed for allow_gc=True, profiling and storage_map reuse
            dependency_map = self.compute_gc_dependencies(storage_map)
            dependency_map_list = [
                [vars_idx[d] for d in dependency_map[vars_idx_inv[i]]]
                for i in range(len(vars_idx_inv))
            ]

            # build the pointers to node inputs and offsets
            base_input_output_list = []
            node_n_inputs = []
            node_n_outputs = []
            node_input_offset = []
            node_output_offset = []
            for node in nodes:
                inputs_idx = [vars_idx[v] for v in node.inputs]
                outputs_idx = [vars_idx[v] for v in node.outputs]
                node_n_inputs.append(len(inputs_idx))
                node_n_outputs.append(len(outputs_idx))
                node_input_offset.append(len(base_input_output_list))
                base_input_output_list.extend(inputs_idx)
                node_output_offset.append(len(base_input_output_list))
                base_input_output_list.extend(outputs_idx)

            # build the var owner array
            var_owner = [None] * len(vars_idx)
            for (var, i) in vars_idx.items():
                if var.owner:
                    var_owner[i] = nodes_idx[var.owner]

            is_lazy_list = [int(th.lazy) for th in thunks]
            output_vars = [vars_idx[v] for v in self.fgraph.outputs]

            # builds the list of prereqs induced by e.g. destroy_handler
            ords = self.fgraph.orderings()
            node_prereqs = []
            node_output_size = []
            for i, node in enumerate(nodes):
                node_output_size.append(0)
                prereq_var_idxs = []
                for prereq_node in ords.get(node, []):
                    prereq_var_idxs.extend([vars_idx[v] for v in prereq_node.outputs])
                prereq_var_idxs = list(set(prereq_var_idxs))
                prereq_var_idxs.sort()  # TODO: why sort?
                node_prereqs.append(prereq_var_idxs)

            # Builds the list of input storage to update (according to update
            # rules) when the outputs are computed.
            # They are in the same order as the second part of output_vars
            # (output_vars contains first the returned outputs, then the
            # values of the update expressions).
            update_storage = []
            update_in_from_out = {}
            for (ivar, ovar) in updated_vars.items():
                update_in_from_out[vars_idx[ovar]] = vars_idx[ivar]
            for oidx in output_vars:
                if oidx in update_in_from_out:
                    update_storage.append(update_in_from_out[oidx])

            # PyPy has no sys.getrefcount, so ignore this check if not running
            # under CPython.
            if platform.python_implementation() == "CPython":
                c0 = sys.getrefcount(node_n_inputs)

            vm = CVM(
                nodes,
                thunks,
                pre_call_clear,
                allow_gc=self.allow_gc,
                call_counts=[0] * len(nodes),
                call_times=[0.0] * len(nodes),
                compute_map_list=compute_map_list,
                storage_map_list=storage_map_list,
                base_input_output_list=base_input_output_list,
                node_n_inputs=node_n_inputs,
                node_n_outputs=node_n_outputs,
                node_input_offset=node_input_offset,
                node_output_offset=node_output_offset,
                var_owner=var_owner,
                is_lazy_list=is_lazy_list,
                output_vars=output_vars,
                node_prereqs=node_prereqs,
                node_output_size=node_output_size,
                update_storage=update_storage,
                dependencies=dependency_map_list,
            )

            # Sanity check that constructing the CVM did not leak or drop a
            # reference to the shared list (CPython-only, see above).
            if platform.python_implementation() == "CPython":
                assert c0 == sys.getrefcount(node_n_inputs)
        else:
            lazy = self.lazy
            if lazy is None:
                lazy = config.vm.lazy
            if lazy is None:
                # Lazy evaluation is only needed if some thunk is lazy.
                lazy = not all([(not th.lazy) for th in thunks])
            if not lazy:
                # there is no conditional in the graph
                if self.allow_gc:
                    vm = LoopGC(
                        nodes,
                        thunks,
                        pre_call_clear,
                        post_thunk_clear,
                    )
                else:
                    vm = Loop(
                        nodes,
                        thunks,
                        pre_call_clear,
                    )
            else:
                # Needed when allow_gc=True and profiling
                deps = self.compute_gc_dependencies(storage_map)
                vm = Stack(
                    nodes,
                    thunks,
                    pre_call_clear,
                    storage_map,
                    compute_map,
                    self.fgraph,
                    self.allow_gc,
                    len(updated_vars),
                    dependencies=deps,
                )
        return vm
Beispiel #47
0
    def _connect(self, timeout: float) -> None:
        """The function that runs on the connection thread. This will connect to Vector,
        and establish the BehaviorControl stream.

        :param timeout: seconds to wait for the gRPC channel to become ready
            (and, when behavior control is required, for control to be granted).

        Any exception raised during connection is stored on ``self._ready_signal``
        (as its ``exception`` attribute) rather than propagated, so the thread
        that spawned this one can detect and re-raise it.
        """
        try:
            # Must run on a dedicated thread: this function creates and owns
            # its own asyncio event loop.
            if threading.main_thread() is threading.current_thread():
                raise Exception(
                    "\n\nConnection._connect must be run outside of the main thread."
                )
            self._loop = asyncio.new_event_loop()
            asyncio.set_event_loop(self._loop)
            self._done_signal = asyncio.Event()
            if not self._requires_behavior_control:
                self._control_events = _ControlEventManager(self._loop)
            else:
                self._control_events = _ControlEventManager(
                    self._loop,
                    priority=CONTROL_PRIORITY_LEVEL.TOP_PRIORITY_AI)
            trusted_certs = None
            with open(self.cert_file, 'rb') as cert:
                trusted_certs = cert.read()

            # Pin the robot certificate for opening the channel
            channel_credentials = aiogrpc.ssl_channel_credentials(
                root_certificates=trusted_certs)
            # Add authorization header for all the calls
            call_credentials = aiogrpc.access_token_call_credentials(
                self._guid)

            credentials = aiogrpc.composite_channel_credentials(
                channel_credentials, call_credentials)

            self._logger.info(
                f"Connecting to {self.host} for {self.name} using {self.cert_file}"
            )
            self._channel = aiogrpc.secure_channel(
                self.host,
                credentials,
                options=((
                    "grpc.ssl_target_name_override",
                    self.name,
                ), ))

            # Verify the connection to Vector is able to be established (client-side)
            try:
                # Explicitly grab _channel._channel to test the underlying grpc channel directly
                grpc.channel_ready_future(self._channel._channel).result(
                    timeout=timeout)  # pylint: disable=protected-access
            except grpc.FutureTimeoutError as e:
                raise VectorNotFoundException() from e

            self._interface = client.ExternalInterfaceStub(self._channel)

            # Verify Vector and the SDK have compatible protocol versions
            version = protocol.ProtocolVersionRequest(
                client_version=CLIENT_VERSION,
                min_host_version=MIN_HOST_VERSION)
            protocol_version = self._loop.run_until_complete(
                self._interface.ProtocolVersion(version))
            if protocol_version.result != protocol.ProtocolVersionResponse.SUCCESS or MIN_HOST_VERSION > protocol_version.host_version:  # pylint: disable=no-member
                raise VectorInvalidVersionException(version, protocol_version)

            self._control_stream_task = self._loop.create_task(
                self._open_connections())

            # Initialize the SDK session: report client/interpreter/platform
            # details to the robot.
            sdk_module_version = __version__
            python_version = platform.python_version()
            python_implementation = platform.python_implementation()
            os_version = platform.platform()
            cpu_version = platform.machine()
            initialize = protocol.SDKInitializationRequest(
                sdk_module_version=sdk_module_version,
                python_version=python_version,
                python_implementation=python_implementation,
                os_version=os_version,
                cpu_version=cpu_version)
            self._loop.run_until_complete(
                self._interface.SDKInitialization(initialize))

            if self._requires_behavior_control:
                self._loop.run_until_complete(
                    self._request_control(timeout=timeout))
        except Exception as e:  # pylint: disable=broad-except
            # Propagate the errors to the calling thread
            setattr(self._ready_signal, "exception", e)
            return
        finally:
            # Always unblock the thread waiting on connection readiness.
            self._ready_signal.set()

        async def wait_until_done():
            return await self._done_signal.wait()

        # Keep the event loop alive until the connection is shut down.
        self._loop.run_until_complete(wait_until_done())
Beispiel #48
0
def main():
    """Parse command-line options and run the selected concurrency benchmarks.

    Runs throughput, latency and/or I/O bandwidth tests depending on the
    flags given (all three when none is selected explicitly).  The hidden
    ``--latclient`` / ``--bwclient`` options are used internally when this
    script re-executes itself as a benchmark client subprocess.
    """
    usage = "usage: %prog [-h|--help] [options]"
    parser = OptionParser(usage=usage)
    parser.add_option("-t", "--throughput",
                      action="store_true", dest="throughput", default=False,
                      help="run throughput tests")
    parser.add_option("-l", "--latency",
                      action="store_true", dest="latency", default=False,
                      help="run latency tests")
    parser.add_option("-b", "--bandwidth",
                      action="store_true", dest="bandwidth", default=False,
                      help="run I/O bandwidth tests")
    parser.add_option("-i", "--interval",
                      action="store", type="int", dest="check_interval",
                      default=None,
                      help="sys.setcheckinterval() value")
    parser.add_option("-I", "--switch-interval",
                      action="store", type="float", dest="switch_interval",
                      default=None,
                      help="sys.setswitchinterval() value")
    parser.add_option("-n", "--num-threads",
                      action="store", type="int", dest="nthreads", default=4,
                      help="max number of threads in tests")

    # Hidden options used when this script re-executes itself as the
    # pinging / bandwidth client subprocess.
    parser.add_option("", "--latclient",
                      action="store", dest="latclient", default=None,
                      help=SUPPRESS_HELP)
    parser.add_option("", "--bwclient",
                      action="store", dest="bwclient", default=None,
                      help=SUPPRESS_HELP)

    options, args = parser.parse_args()
    if args:
        parser.error("unexpected arguments")

    # The client options carry a repr() of a kwargs dict built by the parent
    # process.  Parse it with ast.literal_eval rather than eval(): it only
    # accepts Python literals, so a crafted command line cannot execute
    # arbitrary code.
    import ast

    if options.latclient:
        latency_client(**ast.literal_eval(options.latclient))
        return

    if options.bwclient:
        bandwidth_client(**ast.literal_eval(options.bwclient))
        return

    if not options.throughput and not options.latency and not options.bandwidth:
        # No explicit selection: run everything.
        options.throughput = options.latency = options.bandwidth = True
    if options.check_interval:
        # sys.setcheckinterval() was removed in Python 3.9; honor the option
        # where it still exists and warn otherwise instead of crashing.
        if hasattr(sys, "setcheckinterval"):
            sys.setcheckinterval(options.check_interval)
        else:
            print("warning: sys.setcheckinterval() is unavailable on this "
                  "interpreter; --interval ignored", file=sys.stderr)
    if options.switch_interval:
        sys.setswitchinterval(options.switch_interval)

    print("== %s %s (%s) ==" % (
        platform.python_implementation(),
        platform.python_version(),
        platform.python_build()[0],
    ))
    # Processor identification often has repeated spaces
    cpu = ' '.join(platform.processor().split())
    print("== %s %s on '%s' ==" % (
        platform.machine(),
        platform.system(),
        cpu,
    ))
    print()

    if options.throughput:
        print("--- Throughput ---")
        print()
        run_throughput_tests(options.nthreads)

    if options.latency:
        print("--- Latency ---")
        print()
        run_latency_tests(options.nthreads)

    if options.bandwidth:
        print("--- I/O bandwidth ---")
        print()
        run_bandwidth_tests(options.nthreads)
    def _main(self, args):
        # type: (List[str]) -> int
        """Parse args, configure process-wide state, and run the command.

        Returns a process exit status; the expected pip exception types
        are caught here and mapped to the corresponding status codes.
        """
        # Intentionally set as early as possible so globally-managed temporary
        # directories are available to the rest of the code.
        self.enter_context(global_tempdir_manager())

        options, args = self.parse_args(args)

        # Set verbosity so that it can be used elsewhere.
        self.verbosity = options.verbose - options.quiet

        level_number = setup_logging(
            verbosity=self.verbosity,
            no_color=options.no_color,
            user_log_file=options.log,
        )

        # Warn (once per run) when invoked under Python 2.7, which is EOL.
        if (sys.version_info[:2] == (2, 7)
                and not options.no_python_version_warning):
            message = (
                "A future version of pip will drop support for Python 2.7. "
                "More details about Python 2 support in pip, can be found at "
                "https://pip.pypa.io/en/latest/development/release-process/#python-2-support"  # noqa
            )
            if platform.python_implementation() == "CPython":
                message = (
                    "Python 2.7 reached the end of its life on January "
                    "1st, 2020. Please upgrade your Python as Python 2.7 "
                    "is no longer maintained. ") + message
            deprecated(message, replacement=None, gone_in=None)

        if options.skip_requirements_regex:
            deprecated(
                "--skip-requirements-regex is unsupported and will be removed",
                replacement=(
                    "manage requirements/constraints files explicitly, "
                    "possibly generating them from metadata"),
                gone_in="20.1",
                issue=7297,
            )

        # TODO: Try to get these passing down from the command?
        #       without resorting to os.environ to hold these.
        #       This also affects isolated builds and it should.

        if options.no_input:
            os.environ['PIP_NO_INPUT'] = '1'

        if options.exists_action:
            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)

        if options.require_venv and not self.ignore_require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical(
                    'Could not find an activated virtualenv (required).')
                sys.exit(VIRTUALENV_NOT_FOUND)

        if options.cache_dir:
            options.cache_dir = normalize_path(options.cache_dir)
            if not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "or is not writable by the current user. The cache "
                    "has been disabled. Check the permissions and owner of "
                    "that directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

        # Run the actual command, translating known failures to exit codes.
        try:
            status = self.run(options, args)
            # FIXME: all commands should return an exit status
            # and when it is done, isinstance is not needed anymore
            if isinstance(status, int):
                return status
        except PreviousBuildDirError as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return PREVIOUS_BUILD_DIR_ERROR
        except (InstallationError, UninstallationError, BadCommand) as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except CommandError as exc:
            logger.critical('%s', exc)
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except BrokenStdoutLoggingError:
            # Bypass our logger and write any remaining messages to stderr
            # because stdout no longer works.
            print('ERROR: Pipe to stdout was broken', file=sys.stderr)
            if level_number <= logging.DEBUG:
                traceback.print_exc(file=sys.stderr)

            return ERROR
        except KeyboardInterrupt:
            logger.critical('Operation cancelled by user')
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except BaseException:
            logger.critical('Exception:', exc_info=True)

            return UNKNOWN_ERROR
        finally:
            # Always runs, even on success/sys.exit: version self-check.
            self.handle_pip_version_check(options)

        return SUCCESS
Beispiel #50
0
                                                  "module" % (name,),
                                                  "The output above "
                                                  "this warning shows how "
                                                  "the compilation "
                                                  "failed."))


# Keyword arguments accumulated for the setup() call below.
kwargs = {}

version = "0.1.0"

with open('README.rst') as f:
    kwargs['long_description'] = f.read()


# Build the optional C extension only on CPython, and only unless the user
# opted out by setting DUROTAR_EXTENSION=0 in the environment.
if (platform.python_implementation() == "CPython" and
        os.environ.get('DUROTAR_EXTENSION') != '0'):
    # This extension builds and works on pypy as well, although pypy's jit
    # produces equivalent performance.
    kwargs['ext_modules'] = [
        Extension('durotar.cfilters',
                  sources=['durotar/filters.c']),
    ]

    # NOTE(review): this checks TORNADO_EXTENSION while the opt-out above
    # checks DUROTAR_EXTENSION — looks like a leftover from tornado's
    # setup.py; confirm whether it should read DUROTAR_EXTENSION instead.
    if os.environ.get('TORNADO_EXTENSION') != '1':
        # Unless the user has specified that the extension is mandatory,
        # fall back to the pure-python implementation on any build failure.
        kwargs['cmdclass'] = {'build_ext': custom_build_ext}
setup(
    name="durotar",
Beispiel #51
0
class ExecutorTest:
    """Behavioral tests shared by the executor implementations under test.

    Executor.shutdown() and context manager usage is tested by
    ExecutorShutdownTest.
    """

    def test_submit(self):
        future = self.executor.submit(pow, 2, 8)
        assert 256 == future.result()

    def test_submit_keyword(self):
        future = self.executor.submit(mul, 2, y=8)
        assert 16 == future.result()

    def test_map(self):
        assert list(self.executor.map(pow, range(10), range(10))) == \
            list(map(pow, range(10), range(10)))

    def test_map_exception(self):
        i = self.executor.map(divmod, [1, 1, 1, 1], [2, 3, 0, 5])
        # divmod(1, 2) and divmod(1, 3) both yield (0, 1).  The previous
        # form ``assert next(i), (0 == 1)`` was a corrupted assertion (an
        # assert-with-message that always passed); compare the values.
        assert next(i) == (0, 1)
        assert next(i) == (0, 1)
        # The third pair divides by zero; the error must surface lazily,
        # when that result is consumed.
        with pytest.raises(ZeroDivisionError):
            next(i)

    def test_map_timeout(self):
        results = []
        with pytest.raises(futures.TimeoutError):
            for i in self.executor.map(time.sleep, [0, 0, 5], timeout=1):
                results.append(i)

        assert [None, None] == results

    def test_shutdown_race_issue12456(self):
        # Issue #12456: race condition at shutdown where trying to post a
        # sentinel in the call queue blocks (the queue is full while processes
        # have exited).
        self.executor.map(str, [2] * (self.worker_count + 1))
        self.executor.shutdown()

    @pytest.mark.skipif(
        platform.python_implementation() != "CPython"
        or (sys.version_info >= (3, 8, 0) and sys.version_info < (3, 8, 2)),
        reason="Underlying bug fixed upstream starting Python 3.8.2")
    def test_no_stale_references(self):
        # Issue #16284: check that the executors don't unnecessarily hang onto
        # references.

        # This test has to be skipped on early Python 3.8 versions because of a
        # low-level reference cycle inside the pickle module for early versions
        # of Python 3.8 preventing stale references from being collected. See
        # cloudpipe/cloudpickle#327 as well as
        # https://bugs.python.org/issue39492
        my_object = MyObject()
        collect = threading.Event()
        _ref = weakref.ref(my_object, lambda obj: collect.set())  # noqa
        # Deliberately discarding the future.
        self.executor.submit(my_object.my_method)
        del my_object

        collected = False
        for _ in range(5):
            if IS_PYPY:
                gc.collect()
            collected = collect.wait(timeout=1.0)
            if collected:
                return
        assert collected, "Stale reference not collected within timeout."

    def test_max_workers_negative(self):
        for number in (0, -1):
            with pytest.raises(ValueError) as infos:
                self.executor_type(max_workers=number)
            assert infos.value.args[0] == "max_workers must be greater than 0"

    @pytest.mark.broken_pool
    def test_killed_child(self):
        # When a child process is abruptly terminated, the whole pool gets
        # "broken".
        future = self.executor.submit(time.sleep, 30)
        # Get one of the processes, and terminate (kill) it
        p = next(iter(self.executor._processes.values()))
        p.terminate()
        match = filter_match(r"SIGTERM")
        with pytest.raises(TerminatedWorkerError, match=match):
            future.result()
        # Submitting other jobs fails as well.
        with pytest.raises(TerminatedWorkerError, match=match):
            self.executor.submit(pow, 2, 8)

    def test_map_chunksize(self):
        def bad_map():
            list(self.executor.map(pow, range(40), range(40), chunksize=-1))

        ref = list(map(pow, range(40), range(40)))
        assert list(self.executor.map(pow, range(40), range(40),
                                      chunksize=6)) == ref
        assert list(self.executor.map(pow, range(40), range(40),
                                      chunksize=50)) == ref
        assert list(self.executor.map(pow, range(40), range(40),
                                      chunksize=40)) == ref
        with pytest.raises(ValueError):
            bad_map()

    @classmethod
    def _test_traceback(cls):
        raise RuntimeError(123)  # some comment

    def test_traceback(self):
        # We want ensure that the traceback from the child process is
        # contained in the traceback raised in the main process.
        future = self.executor.submit(self._test_traceback)
        with pytest.raises(Exception) as cm:
            future.result()

        exc = cm.value
        assert type(exc) is RuntimeError
        assert exc.args == (123, )

        cause = exc.__cause__
        assert type(cause) is process_executor._RemoteTraceback
        assert 'raise RuntimeError(123)  # some comment' in cause.tb

    #
    # The following tests are new additions to the test suite originally
    # backported from the Python 3 concurrent.futures package.
    #

    def _test_thread_safety(self, thread_idx, results):
        try:
            # submit a mix of very simple tasks with map and submit,
            # cancel some of them and check the results
            map_future_1 = self.executor.map(sqrt, range(40), timeout=10)
            if thread_idx % 2 == 0:
                # Make it more likely for scheduling threads to overtake one
                # another
                time.sleep(0.001)
            submit_futures = [
                self.executor.submit(time.sleep, 0.0001) for _ in range(20)
            ]
            for i, f in enumerate(submit_futures):
                if i % 2 == 0:
                    f.cancel()
            map_future_2 = self.executor.map(sqrt, range(40), timeout=10)

            assert list(map_future_1) == [sqrt(x) for x in range(40)]
            assert list(map_future_2) == [sqrt(i) for i in range(40)]
            for i, f in enumerate(submit_futures):
                if i % 2 == 1 or not f.cancelled():
                    assert f.result(timeout=10) is None
            results[thread_idx] = 'ok'
        except Exception:
            # Ensure that py.test can report the content of the exception
            results[thread_idx] = traceback.format_exc()

    def test_thread_safety(self):
        # Check that our process-pool executor can be shared to schedule work
        # by concurrent threads
        results = [None] * 10
        threads = [
            Thread(target=self._test_thread_safety, args=(i, results))
            for i in range(len(results))
        ]

        for t in threads:
            t.start()
        for t in threads:
            t.join()
        for result in results:
            if result != "ok":
                raise AssertionError(result)

    @classmethod
    def return_inputs(cls, *args):
        return args

    def test_submit_from_callback(self):
        collected = defaultdict(list)
        executor = self.executor

        def _collect_and_submit_next(future):
            name, count = future.result()
            collected[name].append(count)
            if count > 0:
                future = executor.submit(self.return_inputs, name, count - 1)
                future.add_done_callback(_collect_and_submit_next)

        # Start 3 concurrent callbacks chains
        fa = executor.submit(self.return_inputs, 'chain a', 100)
        fa.add_done_callback(_collect_and_submit_next)
        fb = executor.submit(self.return_inputs, 'chain b', 50)
        fb.add_done_callback(_collect_and_submit_next)
        fc = executor.submit(self.return_inputs, 'chain c', 60)
        fc.add_done_callback(_collect_and_submit_next)
        assert fa.result() == ('chain a', 100)
        assert fb.result() == ('chain b', 50)
        assert fc.result() == ('chain c', 60)

        # Wait a maximum of 5s for the asynchronous callback chains to complete
        patience = 500
        while True:
            if (collected['chain a'] == list(range(100, -1, -1))
                    and collected['chain b'] == list(range(50, -1, -1))
                    and collected['chain c'] == list(range(60, -1, -1))):
                # the recursive callback chains have completed successfully
                break
            elif patience < 0:
                raise AssertionError(
                    f"callback submit chains stalled at: {collected!r}")
            else:
                patience -= 1
                time.sleep(0.01)

    @pytest.mark.timeout(60)
    def test_worker_timeout(self):
        self.executor.shutdown(wait=True)
        self.check_no_running_workers(patience=5)
        timeout = getattr(self, 'min_worker_timeout', .01)
        try:
            self.executor = self.executor_type(max_workers=4,
                                               context=self.context,
                                               timeout=timeout)
        except NotImplementedError as e:
            self.skipTest(str(e))

        for _ in range(5):
            # Trigger worker spawn for lazy executor implementations
            for _ in self.executor.map(id, range(8)):
                pass

            # Check that all workers shutdown (via timeout) when waiting a bit:
            # note that the effective time for a Python process to completely
            # shutdown can vary a lot, especially on loaded CI machines with
            # the atexit callbacks that write test coverage data to disk.
            # Let's be patient.
            self.check_no_running_workers(patience=5)

    @classmethod
    def reducer_in(cls, obj):
        return MyObject, (obj.value + 5, )

    @classmethod
    def reducer_out(cls, obj):
        return MyObject, (7 * obj.value, )

    def test_serialization(self):
        """Test custom serialization for process_executor"""
        self.executor.shutdown(wait=True)

        # Use non commutative operation to check correct order
        job_reducers = {}
        job_reducers[MyObject] = self.reducer_in
        result_reducers = {}
        result_reducers[MyObject] = self.reducer_out

        # Create a new executor to ensure that we did not mess with the
        # existing module level serialization
        executor = self.executor_type(max_workers=2,
                                      context=self.context,
                                      job_reducers=job_reducers,
                                      result_reducers=result_reducers)
        self.executor = self.executor_type(max_workers=2, context=self.context)

        obj = MyObject(1)
        try:
            ret_obj_custom = executor.submit(self.return_inputs,
                                             obj).result()[0]
            ret_obj = self.executor.submit(self.return_inputs, obj).result()[0]

            assert ret_obj.value == 1
            assert ret_obj_custom.value == 42
        finally:
            executor.shutdown(wait=True)

    @classmethod
    def _test_max_depth(cls, max_depth=10, kill_workers=False, ctx=None):
        if max_depth == 0:
            return 42
        executor = cls.executor_type(1, context=ctx)
        # Pass kill_workers and ctx through explicitly: previously ``ctx``
        # was passed positionally into the ``kill_workers`` slot, so nested
        # executors silently ran with the default context and a truthy
        # kill_workers flag.
        f = executor.submit(cls._test_max_depth, max_depth - 1,
                            kill_workers, ctx)
        try:
            return f.result()
        finally:
            executor.shutdown(wait=True, kill_workers=kill_workers)

    @pytest.mark.parametrize('kill_workers', [True, False])
    def test_max_depth(self, kill_workers):
        from loky.process_executor import MAX_DEPTH
        if self.context.get_start_method() == 'fork':
            # For 'fork', we do not allow nested process as the threads ends
            # up in messy states
            with pytest.raises(LokyRecursionError):
                self._test_max_depth(max_depth=2, ctx=self.context)
            return

        assert self._test_max_depth(max_depth=MAX_DEPTH,
                                    kill_workers=kill_workers,
                                    ctx=self.context) == 42

        with pytest.raises(LokyRecursionError):
            self._test_max_depth(max_depth=MAX_DEPTH + 1,
                                 kill_workers=kill_workers,
                                 ctx=self.context)

    @pytest.mark.high_memory
    @pytest.mark.skipif(sys.maxsize < 2**32,
                        reason="Test requires a 64 bit version of Python")
    @pytest.mark.skipif(
        sys.version_info < (3, 8),
        reason=
        "Python version does not support pickling objects of size > 2 ** 31GB")
    def test_no_failure_on_large_data_send(self):
        data = b'\x00' * int(2.2e9)
        self.executor.submit(id, data).result()

    @pytest.mark.high_memory
    @pytest.mark.skipif(sys.maxsize < 2**32,
                        reason="Test requires a 64 bit version of Python")
    @pytest.mark.skipif(
        sys.version_info >= (3, 8),
        reason="Python version supports pickling objects of size > 2 ** 31GB")
    def test_expected_failure_on_large_data_send(self):
        data = b'\x00' * int(2.2e9)
        with pytest.raises(RuntimeError):
            self.executor.submit(id, data).result()

    def test_memory_leak_protection(self):
        self.executor.shutdown(wait=True)

        executor = self.executor_type(1, context=self.context)

        def _leak_some_memory(size=int(3e6), delay=0.001):
            """function that leaks some memory """
            from loky import process_executor
            process_executor._MEMORY_LEAK_CHECK_DELAY = 0.1
            if getattr(os, '_loky_leak', None) is None:
                os._loky_leak = []

            os._loky_leak.append(b"\x00" * size)

            # Leave enough time for the memory leak detector to kick-in:
            # by default the process does not check its memory usage
            # more than once per second.
            time.sleep(delay)

            leaked_size = sum(len(buffer) for buffer in os._loky_leak)
            return os.getpid(), leaked_size

        with pytest.warns(UserWarning, match='memory leak'):
            # Total run time should be 3s which is way over the 1s cooldown
            # period between two consecutive memory checks in the worker.
            futures = [executor.submit(_leak_some_memory) for _ in range(300)]

            executor.shutdown(wait=True)
            results = [f.result() for f in futures]

            # The pid of the worker has changed when restarting the worker
            first_pid, last_pid = results[0][0], results[-1][0]
            assert first_pid != last_pid

            # The restart happened after 100 MB of leak over the
            # default process size + what has leaked since the last
            # memory check.
            for _, leak_size in results:
                assert leak_size / 1e6 < 650

    def test_reference_cycle_collection(self):
        # make the parallel call create a reference cycle and make
        # a weak reference to be able to track the garbage collected objects
        self.executor.shutdown(wait=True)

        executor = self.executor_type(1, context=self.context)

        def _create_cyclic_reference(delay=0.001):
            """function that creates a cyclic reference"""
            from loky import process_executor
            process_executor._USE_PSUTIL = False
            process_executor._MEMORY_LEAK_CHECK_DELAY = 0.1

            class A:
                def __init__(self, size=int(1e6)):
                    self.data = b"\x00" * size
                    self.a = self

            if getattr(os, '_loky_cyclic_weakrefs', None) is None:
                os._loky_cyclic_weakrefs = []

            a = A()
            time.sleep(delay)
            os._loky_cyclic_weakrefs.append(weakref.ref(a))
            return sum(1 for r in os._loky_cyclic_weakrefs if r() is not None)

        # Total run time should be 3s which is way over the 1s cooldown
        # period between two consecutive memory checks in the worker.
        futures = [
            executor.submit(_create_cyclic_reference) for _ in range(300)
        ]

        executor.shutdown(wait=True)

        max_active_refs_count = max(f.result() for f in futures)
        assert max_active_refs_count < 150
        assert max_active_refs_count != 1

    @pytest.mark.broken_pool
    def test_exited_child(self):
        # When a child process is abruptly terminated, the whole pool gets
        # "broken".
        print(self.context.get_start_method())
        match = filter_match(r"EXIT\(42\)")
        future = self.executor.submit(c_exit, 42)
        with pytest.raises(TerminatedWorkerError, match=match):
            future.result()
        # Submitting other jobs fails as well.
        with pytest.raises(TerminatedWorkerError, match=match):
            self.executor.submit(pow, 2, 8)

    @staticmethod
    def _test_child_env(var_name):
        import os
        return os.environ.get(var_name, "unset")

    def test_child_env_executor(self):
        # Test that for loky context, setting argument env correctly overwrite
        # the environment of the child process.
        if self.context.get_start_method() != 'loky':
            pytest.skip(msg="Only work with loky context")

        var_name = "loky_child_env_executor"
        var_value = "variable set"
        executor = self.executor_type(1, env={var_name: var_value})

        var_child = executor.submit(self._test_child_env, var_name).result()
        assert var_child == var_value

        executor.shutdown(wait=True)

    def test_viztracer_profiler(self):
        # Check that the viztracer profiler is initialized in workers when
        # installed.
        viztracer = pytest.importorskip("viztracer")

        def check_viztracer_active():
            tracer = viztracer.get_tracer()
            if tracer is None:
                return False
            return tracer.enable

        active_in_main_process = check_viztracer_active()
        with self.executor_type(1, context=self.context) as e:
            active_in_child_process = e.submit(check_viztracer_active).result()
        assert active_in_main_process == active_in_child_process

        if not active_in_main_process:
            tracer = viztracer.VizTracer()
            try:
                tracer.start()
                with self.executor_type(1, context=self.context) as e:
                    assert e.submit(check_viztracer_active).result()
            finally:
                tracer.stop()

            # Once the tracer has been stopped, should be no side effect on
            # workers started in new executors.
            with self.executor_type(1, context=self.context) as e:
                assert not e.submit(check_viztracer_active).result()

    def test_viztracer_profiler_with_custom_init(self):
        # Check that the viztracer profiler is initialized in workers when
        # installed.
        viztracer = pytest.importorskip("viztracer")

        # Make sure the auto-viztracer initialization works even when
        # the call pass their own init.

        def check_viztracer_active_and_custom_init():
            assert loky._custom_global_var == 42
            tracer = viztracer.get_tracer()
            if tracer is None:
                return False
            return tracer.enable

        existing_tracer = viztracer.get_tracer()
        if existing_tracer is not None and existing_tracer.enable:
            pytest.skip("Cannot run this test if viztracer is active")

        tracer = viztracer.VizTracer()
        try:
            tracer.start()
            with self.executor_type(1,
                                    context=self.context,
                                    initializer=_custom_initializer) as e:
                assert e.submit(
                    check_viztracer_active_and_custom_init).result()
        finally:
            tracer.stop()
Beispiel #52
0
# Filename prefixes that must never be traced.
# ('<attrs' comes from the attrs library internals; '<__array_function__'
# from numpy's dispatch frames.)
IGNORE_BASENAMES_STARTING_WITH = (
    '<frozen ',
    '<builtin',
    '<attrs',
    '<__array_function__',
)

# Note: <string> has special heuristics to know whether it should be traced or not (it's part of
# user code when it's the <string> used in python -c and part of the library otherwise).

# Filename prefixes treated as user (project) code.  Files covered by a
# source mapping are also considered part of the project.
USER_CODE_BASENAMES_STARTING_WITH = ('<ipython', )

# Filename prefixes treated as library code (checked only after the
# user-code prefixes above).
LIBRARY_CODE_BASENAMES_STARTING_WITH = ('<', )

# True when running on the reference CPython interpreter.
IS_CPYTHON = platform.python_implementation() == 'CPython'

# IronPython reports sys.platform == 'cli'; a reference to the original
# _getframe is held further down (psyco replaces it as soon as it is
# imported).
IS_IRONPYTHON = sys.platform == 'cli'
try:
    get_frame = sys._getframe
    if IS_IRONPYTHON:

        def get_frame():
            try:
                return sys._getframe()
            except ValueError:
                pass

except AttributeError:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
#
"""
Various constants describing the connector and its runtime environment.
"""

import platform
import sys

from .compat import TO_UNICODE
from .version import VERSION

# Version strings, e.g. '1.2.3', built from the VERSION tuple.
SNOWFLAKE_CONNECTOR_VERSION = u'.'.join(map(TO_UNICODE, VERSION[:3]))
PYTHON_VERSION = u'.'.join(map(TO_UNICODE, sys.version_info[:3]))

# Host environment details, captured once at import time.
OPERATING_SYSTEM = platform.system()
PLATFORM = platform.platform()
IMPLEMENTATION = platform.python_implementation()
COMPILER = platform.python_compiler()

CLIENT_NAME = u"PythonConnector"  # don't change!
CLIENT_VERSION = u'.'.join([TO_UNICODE(v) for v in VERSION[:3]])
Beispiel #54
0
def main():
    """Command-line entry point for the PyExZ3 concolic execution engine.

    Parses the options defined below, loads the target application,
    explores it with the configured SMT solver(s), prints
    timing/coverage/exception statistics, and optionally serializes the
    execution tree.  Exits non-zero when the target cannot be loaded or
    when the exploration result does not match expectations.
    """
    # OS X support: allow dylibs to see each other; needed by SWIG.
    # NOTE: must compare sys.platform (a string); the previous code
    # compared the `platform` module itself to 'darwin', which is
    # always False, so the dlopen flags were never set on macOS.
    from platform import python_implementation
    if python_implementation() == 'CPython' and sys.platform == 'darwin':
        from ctypes import RTLD_GLOBAL
        sys.setdlopenflags(RTLD_GLOBAL)

    # Concolic instrumentation can recurse deeply through user code.
    sys.setrecursionlimit(10000)

    print("PyExZ3 (Python Exploration with Z3)")

    # Make modules that live next to this script importable by the target.
    sys.path = [os.path.abspath(os.path.join(os.path.dirname(__file__)))
                ] + sys.path

    usage = "usage: %prog [options] <path to a *.py file>"
    parser = OptionParser(usage=usage)

    # Setup
    setup_group = OptionGroup(parser, "Exploration Setup")
    setup_group.add_option("-s",
                           "--start",
                           dest="entry",
                           action="store",
                           help="Specify entry point",
                           default="")
    setup_group.add_option("--cvc",
                           dest="solver",
                           action="store_const",
                           const="cvc",
                           help="Use the CVC SMT solver instead of Z3")
    setup_group.add_option("--z3str2",
                           dest="solver",
                           action="store_const",
                           const="z3str2",
                           help="Use the Z3-str2 SMT solver instead of Z3")
    setup_group.add_option("--z3",
                           dest="solver",
                           action="store_const",
                           const="z3",
                           help="Use the Z3 SMT solver")
    setup_group.add_option(
        "--multi",
        dest="solver",
        action="store_const",
        const="multi",
        help="Use as many different solvers as possible simultaneously")
    parser.add_option_group(setup_group)

    # Configuration
    configuration_group = OptionGroup(parser, "Exploration Configuration")
    configuration_group.add_option(
        "-n",
        "--workers",
        dest="workers",
        type="int",
        help="Run specified number of solvers in parallel",
        default=1)
    configuration_group.add_option(
        "-p",
        "--scheduling-policy",
        dest="scheduling_policy",
        type="str",
        help=
        "The name of the scheduling policy used to assign solving jobs to solvers.",
        default="central_queue")
    parser.add_option_group(configuration_group)

    # Input Detection
    input_detection_group = OptionGroup(parser, "Input Detection")
    input_detection_group.add_option("--argparse",
                                     dest="loader",
                                     action="store_const",
                                     const='argparse')
    input_detection_group.add_option("--sysargv",
                                     dest="loader",
                                     action="store_const",
                                     const='sysargv')
    input_detection_group.add_option("--optparse",
                                     dest="loader",
                                     action="store_const",
                                     const='optparse')
    parser.add_option_group(input_detection_group)

    # Limits
    limits_group = OptionGroup(parser, "Exploration Limits")
    limits_group.add_option(
        "-t",
        "--exploration-timeout",
        dest="explorationtimeout",
        type="int",
        help="Time in seconds to terminate the concolic execution",
        default=None)
    limits_group.add_option(
        "--solve-timeout",
        dest="solvetimeouts",
        action='append',
        type=float,
        help="Time in seconds to terminate a query to the SMT",
        default=None)
    limits_group.add_option(
        "--path-timeout",
        dest="pathtimeout",
        type="int",
        help="Maximum solving time to traverse down a single path",
        default=None)
    limits_group.add_option(
        "-b",
        "--coverage-pruning",
        dest="coverage_pruning",
        type="int",
        help=
        "Prune paths after no coverage increase for the specified number of inputs generated.",
        default=None)
    limits_group.add_option("-m",
                            "--max-iters",
                            dest="max_iters",
                            type="int",
                            help="Run specified number of iterations",
                            default=0)
    parser.add_option_group(limits_group)

    # Serialization and Logging
    logging_group = OptionGroup(parser, "Serialization and Logging")
    logging_group.add_option("-l",
                             "--log",
                             dest="logfile",
                             action="store",
                             help="Save log output to a file",
                             default="")
    logging_group.add_option(
        "-g",
        "--graph",
        dest="execution_graph",
        action="store",
        help="The file to save the serialized execution tree")
    logging_group.add_option(
        "-d",
        "--dot",
        dest="dot_graph",
        action="store",
        help="The file to save a DOT graph of execution tree")
    logging_group.add_option(
        "-q",
        "--query-store",
        dest="query_store",
        type="str",
        help="The folder to store generated and the execution graph, "
        "currently only CVC supports full query serialization")
    logging_group.add_option('--debug',
                             help="Enables debugging output.",
                             action="store_true",
                             dest="debug")
    parser.add_option_group(logging_group)

    (options, args) = parser.parse_args()

    loglevel = logging.DEBUG if options.debug else logging.INFO

    if options.logfile:
        logging.basicConfig(filename=options.logfile,
                            level=loglevel,
                            format='%(asctime)s\t%(levelname)s\t%(message)s',
                            datefmt='%m/%d/%Y %I:%M:%S %p')

    if len(args) == 0 or not os.path.exists(args[0]):
        # parser.error() prints the message and exits with status 2, so
        # the sys.exit(1) that used to follow here was unreachable.
        parser.error("Missing app to execute")

    solver = options.solver if options.solver is not None else "z3"
    solvetimeouts = options.solvetimeouts
    query_store = options.query_store
    scheduling_policy = options.scheduling_policy
    starttime_cpu = time.process_time()
    starttime_wall = time.time()
    filename = os.path.abspath(args[0])

    # Get the object describing the application
    app = loaderFactory(filename, options.entry, loader=options.loader)
    if app is None:
        sys.exit(1)

    print("Exploring " + app.filename + "." + app.entrypoint)

    engine = ExplorationEngine(app.createInvocation(),
                               solver=solver,
                               query_store=query_store,
                               solvetimeouts=solvetimeouts,
                               workers=options.workers,
                               scheduling_policy=scheduling_policy,
                               pathtimeout=options.pathtimeout,
                               coverage_pruning=options.coverage_pruning)
    generatedInputs, return_values, path = engine.explore(
        options.max_iters, options.explorationtimeout)
    # Ask the app whether the observed return values match expectations.
    result = app.execution_complete(return_values)
    endtime_cpu = time.process_time()
    endtime_wall = time.time()

    # Timing, coverage, and exception report.
    print("Execution time: {0:.2f} seconds".format(endtime_wall -
                                                   starttime_wall))
    print("Solver CPU: {0:.2f} seconds".format(engine.total_solve_time))
    instrumentation_time = endtime_cpu - starttime_cpu
    print("Instrumentation CPU: {0:.2f} seconds".format(instrumentation_time))
    print("Path coverage: {} paths".format(len(generatedInputs)))
    total_lines, executed_lines, executed_branches = engine.coverage_statistics(
    )
    print("Line coverage: {}/{} lines ({:.2%})".format(
        executed_lines, total_lines,
        (executed_lines / total_lines) if total_lines > 0 else 0))
    print("Branch coverage: {} branches".format(executed_branches))
    print("Exceptions: {} exceptions raised".format(
        len({
            e
            for e in return_values
            if isinstance(e, Exception) and hasattr(e, 'id')
        })))
    print("Triaged exceptions: {} triaged exceptions raised".format(
        len({
            e.id
            for e in return_values
            if isinstance(e, Exception) and hasattr(e, 'id')
        })))

    # output DOT graph
    if options.dot_graph is not None:
        with open(options.dot_graph, "w") as f:
            f.write(path.toDot())

    # output serialized exploration graph (context manager ensures the
    # handle is flushed and closed; the old code leaked the open file)
    if options.execution_graph is not None:
        with open(options.execution_graph, "wb") as f:
            pickle.dump(path, f)

    if not result and options.loader is None:
        sys.exit(1)
Beispiel #55
0
def user_agent():
    # type: () -> str
    """
    Return a string representing the user agent.

    The result has the form ``pip/<version> <json>`` where ``<json>`` is a
    compact, sorted JSON object describing the interpreter, OS/distro,
    CPU, TLS, and related environment details gathered below.
    """
    data = {
        "installer": {
            "name": "pip",
            "version": __version__
        },
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }  # type: Dict[str, Any]

    # Fill in the implementation version; only PyPy needs special handling.
    if data["implementation"]["name"] == 'CPython':
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'PyPy':
        pypy_version_info = sys.pypy_version_info  # type: ignore
        if pypy_version_info.releaselevel == 'final':
            # Drop the (releaselevel, serial) suffix for final releases.
            pypy_version_info = pypy_version_info[:3]
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info])
    elif data["implementation"]["name"] == 'Jython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'IronPython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro

        # https://github.com/nir0s/distro/pull/269
        linux_distribution = distro.linux_distribution()  # type: ignore
        # Keep only the non-empty fields of the (name, version, id) tuple.
        distro_infos = dict(
            filter(
                lambda x: x[1],
                zip(["name", "version", "id"], linux_distribution),
            ))
        # Likewise for the detected libc (lib, version) pair.
        libc = dict(
            filter(
                lambda x: x[1],
                zip(["lib", "version"], libc_ver()),
            ))
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    # Generic OS name/release, recorded only when platform can detect them.
    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()

    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()

    if platform.machine():
        data["cpu"] = platform.machine()

    if has_tls():
        import _ssl as ssl
        data["openssl_version"] = ssl.OPENSSL_VERSION

    setuptools_dist = get_default_environment().get_distribution("setuptools")
    if setuptools_dist is not None:
        data["setuptools_version"] = str(setuptools_dist.version)

    if shutil.which("rustc") is not None:
        # If for any reason `rustc --version` fails, silently ignore it
        try:
            rustc_output = subprocess.check_output(["rustc", "--version"],
                                                   stderr=subprocess.STDOUT,
                                                   timeout=.5)
        except Exception:
            pass
        else:
            if rustc_output.startswith(b"rustc "):
                # The format of `rustc --version` is:
                # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'`
                # We extract just the middle (1.52.1) part
                data["rustc_version"] = rustc_output.split(b" ")[1].decode()

    # Use None rather than False so as not to give the impression that
    # pip knows it is not being run under CI.  Rather, it is a null or
    # inconclusive result.  Also, we include some value rather than no
    # value to make it easier to know that the check has been run.
    data["ci"] = True if looks_like_ci() else None

    # Allow users to append opaque custom data via an environment variable.
    user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
    if user_data is not None:
        data["user_data"] = user_data

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
# coding: utf-8
"""Functional tests for MIMEEval Plugin"""

from __future__ import absolute_import

import sys
import unittest
import platform

import tests.util

# True when the interpreter is PyPy running Python 3 (presumably used to
# gate tests that behave differently there -- usage not visible here).
IS_PYPY3 = ("pypy" in platform.python_implementation().lower()
            and sys.version_info.major == 3)

# Configuration prelude that loads the plugin under test and sets the
# report template used to extract score and triggered test names.
PRE_CONFIG = """
loadplugin     Mail::SpamAssassin::Plugin::MIMEEval

report _SCORE_
report _TESTS_
"""

# Define rules for plugin
CONFIG = """
body CHECK_MIME_BASE64_COUNT                     eval:check_for_mime("mime_base64_count")   
body CHECK_MIME_BASE64_ENCODED_TEXT              eval:check_for_mime("mime_base64_encoded_text")
body CHECK_MIME_BODY_HTML_COUNT                  eval:check_for_mime("mime_body_html_count")
body CHECK_MIME_BODY_TEXT_COUNT                  eval:check_for_mime("mime_body_text_count")
body CHECK_MIME_FARAWAY_CHARSET                  eval:check_for_mime("mime_faraway_charset")
body CHECK_MIME_MISSING_BOUNDARY                 eval:check_for_mime("mime_missing_boundary")
body CHECK_MIME_MULTIPART_ALTERNATIVE            eval:check_for_mime("mime_multipart_alternative")
body CHECK_MIME_MULTIPART_RATIO                  eval:check_for_mime("mime_multipart_ratio")
Beispiel #57
0
    from pydicom.valuerep import PersonName3 as PersonNameUnicode

    PersonName = PersonNameUnicode
else:
    from pydicom.valuerep import (PersonName, PersonNameUnicode)

try:
    # Prefer the C implementation on Python 2; Python 3 has no cPickle,
    # so fall back to the standard pickle module there.
    import cPickle as pickle
except ImportError:
    import pickle

# Test DICOM file whose name indicates it contains a bad VR -- TODO confirm.
badvr_name = get_testdata_files("badVR.dcm")[0]
# Default character encoding used by the tests in this module.
default_encoding = 'iso8859'


@pytest.mark.skipif(platform.python_implementation() == 'PyPy',
                    reason="PyPy has trouble with this pickle")
class TestTM(object):
    """Unit tests for pickling TM"""

    def test_pickling(self):
        # A TM value must survive a pickle round trip, including the
        # dynamically attached ``original_string`` attribute.
        original = pydicom.valuerep.TM("212223")
        original.original_string = 'hello'
        assert 'hello' == original.original_string
        assert time(21, 22, 23) == original

        restored = pickle.loads(pickle.dumps(original))
        assert original == restored
        assert original.original_string == restored.original_string
        assert str(original) == str(restored)
Beispiel #58
0
 def _get_software_info(self):
     """Describe the running interpreter: its version and implementation."""
     python_details = {
         "version": pf.python_version(),
         "implementation": pf.python_implementation(),
     }
     return {"python": python_details}
Beispiel #59
0
import dramatiq
import platform
import pytest
import time

from dramatiq import Message, Middleware, Worker
from dramatiq.middleware import SkipMessage

# Name of the running interpreter ("CPython", "PyPy", ...), cached once at
# import time for tests that vary by implementation.
_current_platform = platform.python_implementation()


def test_actors_can_be_defined(stub_broker):
    """Decorating a plain function with @actor must yield an Actor."""
    # Given that I've decorated a function with @actor
    # (the name `add` is kept: dramatiq registers actors by function name).
    @dramatiq.actor
    def add(x, y):
        total = x + y
        return total

    # I expect that function to become an instance of Actor
    assert isinstance(add, dramatiq.Actor)


def test_actors_can_be_assigned_predefined_options(stub_broker):
    """Options passed to @actor(...) must be retained on the Actor."""
    # Given a stub broker with the retries middleware, define an actor
    # with an explicit max_retries value...
    @dramatiq.actor(max_retries=32)
    def add(x, y):
        total = x + y
        return total

    # ...and expect that option to persist on the actor object.
    assert 32 == add.options["max_retries"]
Beispiel #60
0
        sys.exit(
            "The folder you are executing pip from can no longer be found."
        )

# under macOS + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
# Note: using realpath due to tmp dirs on OSX being symlinks
# Normalize to an absolute path (the prefix may contain ".." components,
# e.g. /path/to/python/bin/.. under macOS virtualenvs).
src_prefix = os.path.abspath(src_prefix)

# FIXME doesn't account for venv linked to global site-packages

site_packages = sysconfig.get_path("purelib")
# This is because of a bug in PyPy's sysconfig module, see
# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
# for more information.
if platform.python_implementation().lower() == "pypy":
    site_packages = distutils_sysconfig.get_python_lib()
try:
    # Use getusersitepackages if this is present, as it ensures that the
    # value is initialised properly.
    user_site = site.getusersitepackages()
except AttributeError:
    # Older interpreters lack site.getusersitepackages(); fall back to
    # the raw USER_SITE constant.
    user_site = site.USER_SITE
# Current user's home directory, used below to build per-user paths.
user_dir = expanduser('~')
if WINDOWS:
    bin_py = os.path.join(sys.prefix, 'Scripts')
    bin_user = os.path.join(user_site, 'Scripts')
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
        bin_user = os.path.join(user_site, 'bin')