"""API properties. """ from __future__ import print_function from __future__ import unicode_literals from tabulate import tabulate, tabulate_formats, simple_separated_format from platform import python_version_tuple from common import SkipTest try: if python_version_tuple() >= ('3', '3', '0'): from inspect import signature, _empty except ImportError: signature = None _empty = None def test_tabulate_formats(): "API: tabulate_formats is a list of strings" "" supported = tabulate_formats print("tabulate_formats = %r" % supported) assert type(supported) is list for fmt in supported: assert type(fmt) is type("") def _check_signature(function, expected_sig): if not signature: raise SkipTest() actual_sig = signature(function)
# pdftoppm fails? Current code doesn't seem to. Note gs needs to be findable on PATH, # but it gets placed in C:\Program Files\gs\gs9.27\bin\gswin64c.exe (for example). temp_file_prefix = "pdfCropMarginsTmp_" # prefixed to all temporary filenames temp_dir_prefix = "pdfCropMarginsTmpDir_" # prefixed to all temporary dirnames cygwin_full_path_prefix = "/cygdrive" ## ## Get info about the OS we're running on. ## import platform # Get the version as a tuple of strings: (major, minor, patchlevel) python_version = platform.python_version_tuple() # sys.version_info works too # Get the system OS type from platform.system as a string such as "Linux", # "Windows", or "CYGWIN*". Note that os.name instead returns something like # "nt" for Windows and "posix" for Linux and Cygwin. system_os = platform.system() if system_os[:6].lower() == "cygwin": system_os = "Cygwin" # TODO: Below line seems no longer necessary; the condition is not checked. # Either check again or update the man page. #system_os = "Windows" # Uncomment ONLY to test Windows on Linux with Wine. # Find the number of bits the OS supports. if sys.maxsize > 2**32: system_bits = 64 # Supposed to work on Macs, too.
#!/usr/bin/env python3 # -*- coding: utf8 -*- from __future__ import print_function from getpass import getpass from argparse import ArgumentParser from os import chdir, path, makedirs, pardir, environ from subprocess import call, Popen from functools import partial from platform import python_version_tuple from github import Github if python_version_tuple()[0] == u'2': input = lambda prompt: raw_input(prompt.encode('utf8')).decode('utf8') __author__ = u'"Mustafa Hasturk"' __version__ = '2.1.0' class Gitim(): def __init__(self): print(u""" .--. .--. __ __ ___ .--./) |__| |__|| |/ `.' `. /.''\\ .--. .| .--.| .-. .-. ' | | | | | | .' |_ | || | | | | | \`-' / | | .' || || | | | | | /("'` | |'--. .-'| || | | | | | \ '---. | | | | | || | | | | | /'""'.\|__| | | |__||__| |__| |__|
def is_python_3():
    """Return True when the running interpreter's major version is 3."""
    version_fields = platform.python_version_tuple()
    return version_fields[0] == '3'
def get_buffer(encoding):
    """Return the current vim buffer; on Python 2 decode each line first.

    Python 2 vim buffers hold byte strings, so every line is decoded with
    *encoding*; on Python 3 the buffer object is returned unchanged.
    """
    if platform.python_version_tuple()[0] != '3':
        return [raw_line.decode(encoding) for raw_line in vim.current.buffer]
    return vim.current.buffer
from platform import python_implementation, python_version_tuple

# Report the interpreter implementation (e.g. "CPython") followed by each
# version component (major, minor, patchlevel), one per line.
print(python_implementation())
for version_component in python_version_tuple():
    print(version_component)
# coding: utf-8 import requests import platform from numbers import Number import xml.etree.cElementTree as xml from collections import namedtuple from six import string_types py_majversion, py_minversion, py_revversion = platform.python_version_tuple() if py_majversion == '2': from httplib import responses as HTTP_CODES from urlparse import urlparse from urlparse import urlsplit else: from http.client import responses as HTTP_CODES from urllib.parse import urlparse from urllib.parse import urlsplit DOWNLOAD_CHUNK_SIZE_BYTES = 1 * 1024 * 1024 AUTH_MODE_BASIC = 'basic' AUTH_MODE_DIGEST = 'digest' CONTENT_TYPE_DIRECTORY = 'httpd/unix-directory' class WebdavException(Exception): pass
""" browser-history's setup. browser-history is a simple, zero-dependencies, developer-friendly python package to retrieve (almost) any browser's history on (almost) any platform. See https://browser-history.readthedocs.io/en/stable/ for more help. """ try: import setuptools except ImportError: raise RuntimeError( "Could not install browser-history in the environment as setuptools " "is missing. Please create a new virtual environment before proceeding" ) import platform MIN_PYTHON_VERSION = ("3", "6") if platform.python_version_tuple() < MIN_PYTHON_VERSION: raise SystemExit( "Could not install browser-history in the environment. The" " browser-history package requires python version 3.6+, you are using " f"{platform.python_version()}") if __name__ == "__main__": setuptools.setup()
#!/usr/bin/env python3
from importlib import import_module
import platform
import sys

if __name__ == '__main__':
    # Refuse to run under Python 2, then hand control to the test runner.
    version_fields = platform.python_version_tuple()
    (major, minor, patch) = version_fields
    if int(major) < 3:
        message = 'This requires python 3, you are running %s.%s.%s.\n' % (
            major, minor, patch)
        sys.stderr.write(message)
        sys.exit(1)
    runner = import_module('test.runner')
    runner.main()
def get_machine_id():
    """Return an identifier of the form ``<os>-<impl>-<major.minor>-<bits>``.

    Built from the OS name, Python implementation, short interpreter
    version, and pointer-size architecture string.
    """
    os_name = platform.system()
    implementation = platform.python_implementation()
    short_version = ".".join(platform.python_version_tuple()[:2])
    bitness = platform.architecture()[0]
    return "-".join([os_name, implementation, short_version, bitness])
return False else: return True # depend in Pillow if it is installed, otherwise # depend on PIL if it is installed, otherwise # require Pillow if package_installed('Pillow'): install_requires.append('Pillow !=2.4.0,!=8.3.0,!=8.3.1') elif package_installed('PIL'): install_requires.append('PIL>=1.1.6,<1.2.99') else: install_requires.append('Pillow !=2.4.0,!=8.3.0,!=8.3.1') if platform.python_version_tuple() < ('2', '6'): # for mapproxy-seed install_requires.append('multiprocessing>=2.6') def long_description(changelog_releases=10): import re import textwrap readme = open('README.rst').read() changes = ['Changes\n-------\n'] version_line_re = re.compile(r'^\d\.\d+\.\d+\S*\s20\d\d-\d\d-\d\d') for line in open('CHANGES.txt'): if version_line_re.match(line): if changelog_releases == 0: break
def start(): pyv = python_version_tuple() if pyv[0] != '3': print('Need python 3 for execute this script') sys.exit(1) parser = argparse.ArgumentParser( description= 'An Tool for add new servers config to your protozoo setup. In future versions you can create servers using this tool' ) parser.add_argument( '--ip_range', help= 'A range of ip\'s for the servers in format 192.168.1.5-192.168.1.33.') parser.add_argument('--ip_list', help='A list of ip\'s of new servers separed by ,') parser.add_argument('--remove_ip', help='If true, the ip list is used for delete servers', required=False, nargs='?', const='1') parser.add_argument('--os', help='The operating system of new servers', required=True) parser.add_argument('--domainname', help='The domain name of new servers', required=True) parser.add_argument('--type', help='The type of servers', required=True) parser.add_argument('--save_in_db', help='Save in a database (you need special config)', required=False, nargs='?', const='1') parser.add_argument('--profile', help='The profile where the servers are saved', required=True) args = parser.parse_args() if args.ip_range == None and args.ip_list == None: parser.error('You need --ip_range or --ip_list options') arr_ip = [] if args.ip_range is not None: range_ips = args.ip_range.split('-') try: ipaddress.ip_address(range_ips[0]) except: parser.error('First element of ip range is not valid IPv4 or IPv6') try: ipaddress.ip_address(range_ips[1]) except: parser.error( 'Second element of ip range is not valid IPv4 or IPv6') for ipaddr in ipaddress.summarize_address_range( ipaddress.ip_address(range_ips[0]), ipaddress.ip_address(range_ips[1])): for ip in ipaddr: arr_ip.append(ip) elif args.ip_list is not None: ip_list = args.ip_list.split(',') for ip in ip_list: arr_ip.append(ipaddress.ip_address(ip)) if len(arr_ip) > 0: #Save if args.save_in_db == None: print('Saving new servers in file...') old_servers = {} check_old = 0 try: servers = import_module('settings.' 
+ args.profile) for k, server in enumerate(servers.servers): old_servers[server['ip']] = 1 check_old = 1 except: pass new_file = 'settings/' + args.profile + '.py' file_txt = "#!/usr/bin/python3\n" file_txt += "servers=[]\n" prefix = '' if args.profile != None: prefix = "-" + args.profile.replace('.', '-') for ip in arr_ip: old_servers[str(ip)] = old_servers.get(str(ip), 0) if old_servers[str(ip)] == 0: hostname = str(ip).replace( '.', '') + prefix + '.' + args.domainname file_txt += "servers.append({'hostname': '" + hostname + "', 'os_codename': '" + str( args.os) + "', 'ip': '" + str( ip) + "', 'name': '" + str(hostname).replace( '.', '_') + "'})\n" elif args.remove_ip == '1': old_servers[str(ip)] = 0 pass #Add old servers if check_old == 1: for server in servers.servers: if old_servers[server['ip']] == 1: file_txt += "servers.append({'hostname': '" + server[ 'hostname'] + "', 'os_codename': '" + server[ 'os_codename'] + "', 'ip': '" + server[ 'ip'] + "', 'name': '" + server[ 'name'] + "'})\n" #Save file file = open(new_file, 'w+') file.write(file_txt) file.close() else: #Import settings for db print('Saving new servers in database...') new_file = 'settings/' + args.profile + '.py' try: config_db = import_module('settings.config') except: print( 'You need a configuration file called config.py for use a database for save servers data. 
Also you need cromosoma module installed' ) print("Exception in user code:") print("-" * 60) traceback.print_exc(file=sys.stdout) print("-" * 60) exit(1) pass #Load model model = import_module('modules.pastafari.models.servers') server = model.Server() if args.remove_ip == '1': for ip in arr_ip: server.conditions = ['WHERE ip=%s', [str(ip)]] server.delete() else: if args.profile != None: prefix = "-" + args.profile.replace('.', '-') for ip in arr_ip: #file_txt+="servers.append({'hostname': '"+hostname+"', 'os_codename': '"+str(args.os)+"', 'ip': '"+str(ip)+"', 'name': '"+str(hostname).replace('.', '_')+"'})\n" #Check if server exists in this profile server.conditions = [ 'WHERE ip=%s and profile=%s', [str(ip), args.profile] ] num_server = server.select_count() if num_server == 0: #if args.type!=None: # type_server="-"+args.type.replace('.', '-') hostname = str(ip).replace( '.', '') + prefix + '.' + args.domainname arr_server = { 'hostname': hostname, 'os_codename': str(args.os), 'ip': str(ip), 'name': str(hostname).replace('.', '_'), 'type': args.type, 'profile': args.profile } server.create_forms() server.insert(arr_server) #Save file file_txt = "#!/usr/bin/python3\n\n" file_txt += "from modules.pastafari.models.servers import Server\n\n" file_txt += "from settings import config\n\n" file_txt += "server=Server()\n\n" file_txt += "servers=[]\n\n" file_txt += "server.conditions='[where type=%s and profile=%s', ['" + args.type + "', '" + args.profile + "']]\n\n" #args.type args.profile file_txt += "cur=server.select()\n\n" file_txt += "for row in cur:\n" file_txt += " servers.append(row)\n" file = open(new_file, 'w+') file.write(file_txt) file.close()
>>>>>>> 7e5c5fbd6c824de4d4c2b62da3f7cae87d462119 return { "implementation_name": implementation_name, "implementation_version": iver, "os_name": os.name, "platform_machine": platform.machine(), "platform_release": platform.release(), "platform_system": platform.system(), "platform_version": platform.version(), "python_full_version": platform.python_version(), "platform_python_implementation": platform.python_implementation(), <<<<<<< HEAD "python_version": platform.python_version()[:3], ======= "python_version": ".".join(platform.python_version_tuple()[:2]), >>>>>>> 7e5c5fbd6c824de4d4c2b62da3f7cae87d462119 "sys_platform": sys.platform, } class Marker(object): <<<<<<< HEAD def __init__(self, marker): ======= def __init__(self, marker): # type: (str) -> None >>>>>>> 7e5c5fbd6c824de4d4c2b62da3f7cae87d462119 try: self._markers = _coerce_parse_result(MARKER.parseString(marker))
class TestResources(unittest.TestCase): @classmethod def setUpClass(cls): cls.schema_class = XMLSchema10 cls.vh_dir = casepath('examples/vehicles') cls.vh_xsd_file = casepath('examples/vehicles/vehicles.xsd') cls.vh_xml_file = casepath('examples/vehicles/vehicles.xml') cls.col_dir = casepath('examples/collection') cls.col_xsd_file = casepath('examples/collection/collection.xsd') cls.col_xml_file = casepath('examples/collection/collection.xml') def check_url(self, url, expected): url_parts = urlsplit(url) if urlsplit(expected).scheme not in uses_relative: expected = add_leading_slash(expected) expected_parts = urlsplit(expected, scheme='file') self.assertEqual(url_parts.scheme, expected_parts.scheme, "%r: Schemes differ." % url) self.assertEqual(url_parts.netloc, expected_parts.netloc, "%r: Netloc parts differ." % url) self.assertEqual(url_parts.query, expected_parts.query, "%r: Query parts differ." % url) self.assertEqual(url_parts.fragment, expected_parts.fragment, "%r: Fragment parts differ." % url) if is_windows_path(url_parts.path) or is_windows_path( expected_parts.path): path = PureWindowsPath(filter_windows_path(url_parts.path)) expected_path = PureWindowsPath( filter_windows_path(expected_parts.path)) else: path = PurePath(url_parts.path) expected_path = PurePath(expected_parts.path) self.assertEqual(path, expected_path, "%r: Paths differ." 
% url) def test_url_helper_functions(self): self.assertTrue(is_url(self.col_xsd_file)) self.assertFalse(is_url(' \t<root/>')) self.assertFalse(is_url('line1\nline2')) self.assertFalse(is_url(None)) self.assertTrue(is_local_url(self.col_xsd_file)) self.assertTrue(is_local_url('/home/user/')) self.assertTrue(is_local_url('/home/user/schema.xsd')) self.assertTrue(is_local_url(' /home/user/schema.xsd ')) self.assertTrue(is_local_url('C:\\Users\\foo\\schema.xsd')) self.assertTrue(is_local_url(' file:///home/user/schema.xsd')) self.assertFalse(is_local_url('http://example.com/schema.xsd')) self.assertFalse(is_remote_url(self.col_xsd_file)) self.assertFalse(is_remote_url('/home/user/')) self.assertFalse(is_remote_url('/home/user/schema.xsd')) self.assertFalse(is_remote_url(' file:///home/user/schema.xsd')) self.assertTrue(is_remote_url(' http://example.com/schema.xsd')) self.assertTrue(url_path_is_file(self.col_xml_file)) self.assertFalse(url_path_is_file(self.col_dir)) def test_normalize_url_posix(self): url1 = "https://example.com/xsd/other_schema.xsd" self.check_url( normalize_url(url1, base_url="/path_my_schema/schema.xsd"), url1) parent_dir = os.path.dirname(os.getcwd()) self.check_url(normalize_url('../dir1/./dir2'), os.path.join(parent_dir, 'dir1/dir2')) self.check_url( normalize_url('../dir1/./dir2', '/home', keep_relative=True), 'file:///dir1/dir2') self.check_url(normalize_url('../dir1/./dir2', 'file:///home'), 'file:///dir1/dir2') self.check_url(normalize_url('other.xsd', 'file:///home'), 'file:///home/other.xsd') self.check_url(normalize_url('other.xsd', 'file:///home/'), 'file:///home/other.xsd') self.check_url(normalize_url('file:other.xsd', 'file:///home'), 'file:///home/other.xsd') cwd = os.getcwd() cwd_url = 'file://{}/'.format(cwd) if cwd.startswith( '/') else 'file:///{}/'.format(cwd) self.check_url(normalize_url('file:other.xsd', keep_relative=True), 'file:other.xsd') self.check_url(normalize_url('file:other.xsd'), cwd_url + 'other.xsd') 
self.check_url( normalize_url('file:other.xsd', 'http://site/base', True), 'file:other.xsd') self.check_url(normalize_url('file:other.xsd', 'http://site/base'), cwd_url + 'other.xsd') self.check_url(normalize_url('dummy path.xsd'), cwd_url + 'dummy path.xsd') self.check_url(normalize_url('dummy path.xsd', 'http://site/base'), 'http://site/base/dummy%20path.xsd') self.check_url(normalize_url('dummy path.xsd', 'file://host/home/'), 'file://host/home/dummy path.xsd') def test_normalize_url_windows(self): win_abs_path1 = 'z:\\Dir_1_0\\Dir2-0\\schemas/XSD_1.0/XMLSchema.xsd' win_abs_path2 = 'z:\\Dir-1.0\\Dir-2_0\\' self.check_url(normalize_url(win_abs_path1), win_abs_path1) self.check_url(normalize_url('k:\\Dir3\\schema.xsd', win_abs_path1), 'file:///k:\\Dir3\\schema.xsd') self.check_url(normalize_url('k:\\Dir3\\schema.xsd', win_abs_path2), 'file:///k:\\Dir3\\schema.xsd') self.check_url(normalize_url('schema.xsd', win_abs_path2), 'file:///z:\\Dir-1.0\\Dir-2_0/schema.xsd') self.check_url(normalize_url('xsd1.0/schema.xsd', win_abs_path2), 'file:///z:\\Dir-1.0\\Dir-2_0/xsd1.0/schema.xsd') self.check_url(normalize_url('file:///\\k:\\Dir A\\schema.xsd'), 'file:///k:\\Dir A\\schema.xsd') def test_normalize_url_slashes(self): # Issue #116 self.assertEqual( normalize_url('//anaconda/envs/testenv/lib/python3.6/' 'site-packages/xmlschema/validators/schemas/'), 'file:///anaconda/envs/testenv/lib/python3.6/' 'site-packages/xmlschema/validators/schemas/') self.assertEqual(normalize_url('/root/dir1/schema.xsd'), 'file:///root/dir1/schema.xsd') self.assertEqual(normalize_url('//root/dir1/schema.xsd'), 'file:///root/dir1/schema.xsd') self.assertEqual(normalize_url('////root/dir1/schema.xsd'), 'file:///root/dir1/schema.xsd') self.assertEqual(normalize_url('dir2/schema.xsd', '//root/dir1/'), 'file:///root/dir1/dir2/schema.xsd') self.assertEqual(normalize_url('dir2/schema.xsd', '//root/dir1'), 'file:///root/dir1/dir2/schema.xsd') self.assertEqual(normalize_url('dir2/schema.xsd', 
'////root/dir1'), 'file:///root/dir1/dir2/schema.xsd') def test_normalize_url_hash_character(self): self.check_url(normalize_url('issue #000.xml', 'file:///dir1/dir2/'), 'file:///dir1/dir2/issue %23000.xml') self.check_url( normalize_url('data.xml', 'file:///dir1/dir2/issue 000'), 'file:///dir1/dir2/issue 000/data.xml') self.check_url(normalize_url('data.xml', '/dir1/dir2/issue #000'), '/dir1/dir2/issue %23000/data.xml') def test_fetch_resource(self): wrong_path = casepath('resources/dummy_file.txt') self.assertRaises(XMLSchemaResourceError, fetch_resource, wrong_path) right_path = casepath('resources/dummy file.txt') self.assertTrue(fetch_resource(right_path).endswith('dummy file.txt')) ambiguous_path = casepath('resources/dummy file #2.txt') self.assertTrue( fetch_resource(ambiguous_path).endswith('dummy file %232.txt')) res = urlopen(fetch_resource(ambiguous_path)) try: self.assertEqual(res.read(), b'DUMMY CONTENT') finally: res.close() def test_fetch_namespaces(self): self.assertFalse(fetch_namespaces(casepath('resources/malformed.xml'))) def test_fetch_schema_locations(self): locations = fetch_schema_locations(self.col_xml_file) self.check_url(locations[0], self.col_xsd_file) self.assertEqual(locations[1][0][0], 'http://example.com/ns/collection') self.check_url(locations[1][0][1], self.col_xsd_file) self.check_url(fetch_schema(self.vh_xml_file), self.vh_xsd_file) def test_get_context(self): source, schema = get_context(self.col_xml_file) self.assertIsInstance(source, XMLResource) self.assertIsInstance(schema, XMLSchema) source, schema = get_context(self.col_xml_file, self.col_xsd_file) self.assertIsInstance(source, XMLResource) self.assertIsInstance(schema, XMLSchema) source, schema = get_context(self.vh_xml_file, cls=XMLSchema10) self.assertIsInstance(source, XMLResource) self.assertIsInstance(schema, XMLSchema10) source, schema = get_context(self.col_xml_file, cls=XMLSchema11) self.assertIsInstance(source, XMLResource) self.assertIsInstance(schema, 
XMLSchema11) source, schema = get_context(XMLResource(self.vh_xml_file)) self.assertIsInstance(source, XMLResource) self.assertIsInstance(schema, XMLSchema) # Issue #145 with open(self.vh_xml_file) as f: source, schema = get_context(f, schema=self.vh_xsd_file) self.assertIsInstance(source, XMLResource) self.assertIsInstance(schema, XMLSchema) with open(self.vh_xml_file) as f: source, schema = get_context(XMLResource(f), schema=self.vh_xsd_file) self.assertIsInstance(source, XMLResource) self.assertIsInstance(schema, XMLSchema) with open(self.vh_xml_file) as f: source, schema = get_context(f, base_url=self.vh_dir) self.assertIsInstance(source, XMLResource) self.assertIsInstance(schema, XMLSchema) # Tests on XMLResource instances def test_xml_resource_from_url(self): resource = XMLResource(self.vh_xml_file) self.assertEqual(resource.source, self.vh_xml_file) self.assertEqual(resource.root.tag, '{http://example.com/vehicles}vehicles') self.check_url(resource.url, self.vh_xml_file) self.assertIsNone(resource.document) self.assertIsNone(resource.text) resource.load() self.assertTrue(resource.text.startswith('<?xml')) resource = XMLResource(self.vh_xml_file, lazy=False) self.assertEqual(resource.source, self.vh_xml_file) self.assertEqual(resource.root.tag, '{http://example.com/vehicles}vehicles') self.check_url(resource.url, self.vh_xml_file) self.assertIsInstance(resource.document, ElementTree.ElementTree) self.assertIsNone(resource.text) resource.load() self.assertTrue(resource.text.startswith('<?xml')) def test_xml_resource_from_element_tree(self): vh_etree = ElementTree.parse(self.vh_xml_file) vh_root = vh_etree.getroot() resource = XMLResource(vh_etree) self.assertEqual(resource.source, vh_etree) self.assertEqual(resource.document, vh_etree) self.assertEqual(resource.root.tag, '{http://example.com/vehicles}vehicles') self.assertIsNone(resource.url) self.assertIsNone(resource.text) resource.load() self.assertIsNone(resource.text) resource = XMLResource(vh_root) 
self.assertEqual(resource.source, vh_root) self.assertIsInstance(resource.document, ElementTree.ElementTree) self.assertEqual(resource.root.tag, '{http://example.com/vehicles}vehicles') self.assertIsNone(resource.url) self.assertIsNone(resource.text) resource.load() self.assertIsNone(resource.text) @unittest.skipIf(lxml_etree is None, "Skip: lxml is not available.") def test_xml_resource_from_lxml(self): vh_etree = lxml_etree.parse(self.vh_xml_file) vh_root = vh_etree.getroot() resource = XMLResource(vh_etree) self.assertEqual(resource.source, vh_etree) self.assertEqual(resource.document, vh_etree) self.assertEqual(resource.root.tag, '{http://example.com/vehicles}vehicles') self.assertIsNone(resource.url) self.assertIsNone(resource.text) resource.load() self.assertIsNone(resource.text) resource = XMLResource(vh_root) self.assertEqual(resource.source, vh_root) self.assertEqual(resource.root.tag, '{http://example.com/vehicles}vehicles') self.assertIsNone(resource.url) self.assertIsNone(resource.text) resource.load() self.assertIsNone(resource.text) @unittest.skipIf(platform.python_version_tuple()[0] < '3', "Skip: urlopen on Python 2 can't seek 'file://' paths.") def test_xml_resource_from_resource(self): xml_file = urlopen('file://{}'.format( add_leading_slash(self.vh_xml_file))) try: resource = XMLResource(xml_file) self.assertEqual(resource.source, xml_file) self.assertEqual(resource.root.tag, '{http://example.com/vehicles}vehicles') self.assertIsNone(resource.url) self.assertIsNone(resource.document) self.assertIsNone(resource.text) resource.load() self.assertTrue(resource.text.startswith('<?xml')) self.assertFalse(xml_file.closed) finally: xml_file.close() def test_xml_resource_from_file(self): with open(self.vh_xsd_file) as schema_file: resource = XMLResource(schema_file) self.assertEqual(resource.source, schema_file) self.assertEqual(resource.root.tag, '{http://www.w3.org/2001/XMLSchema}schema') self.assertIsNone(resource.url) 
self.assertIsNone(resource.document) self.assertIsNone(resource.text) resource.load() self.assertTrue(resource.text.startswith('<xs:schema')) self.assertFalse(schema_file.closed) for _ in resource.iter(): pass self.assertFalse(schema_file.closed) for _ in resource.iter_subtrees(): pass self.assertFalse(schema_file.closed) with open(self.vh_xsd_file) as schema_file: resource = XMLResource(schema_file, lazy=False) self.assertEqual(resource.source, schema_file) self.assertEqual(resource.root.tag, '{http://www.w3.org/2001/XMLSchema}schema') self.assertIsNone(resource.url) self.assertIsInstance(resource.document, ElementTree.ElementTree) self.assertIsNone(resource.text) resource.load() self.assertTrue(resource.text.startswith('<xs:schema')) self.assertFalse(schema_file.closed) for _ in resource.iter(): pass self.assertFalse(schema_file.closed) for _ in resource.iter_subtrees(): pass self.assertFalse(schema_file.closed) def test_xml_resource_from_string(self): with open(self.vh_xsd_file) as schema_file: schema_text = schema_file.read() resource = XMLResource(schema_text) self.assertEqual(resource.source, schema_text) self.assertEqual(resource.root.tag, '{http://www.w3.org/2001/XMLSchema}schema') self.assertIsNone(resource.url) self.assertIsNone(resource.document) self.assertTrue(resource.text.startswith('<xs:schema')) invalid_xml = '<tns0:root>missing namespace declaration</tns0:root>' with self.assertRaises(ElementTree.ParseError) as ctx: XMLResource(invalid_xml) self.assertEqual(str(ctx.exception), 'unbound prefix: line 1, column 0') def test_xml_resource_from_string_io(self): with open(self.vh_xsd_file) as schema_file: schema_text = schema_file.read() schema_file = StringIO(schema_text) resource = XMLResource(schema_file) self.assertEqual(resource.source, schema_file) self.assertEqual(resource.root.tag, '{http://www.w3.org/2001/XMLSchema}schema') self.assertIsNone(resource.url) self.assertIsNone(resource.document) 
self.assertTrue(resource.text.startswith('<xs:schema')) schema_file = StringIO(schema_text) resource = XMLResource(schema_file, lazy=False) self.assertEqual(resource.source, schema_file) self.assertEqual(resource.root.tag, '{http://www.w3.org/2001/XMLSchema}schema') self.assertIsNone(resource.url) self.assertIsInstance(resource.document, ElementTree.ElementTree) self.assertTrue(resource.text.startswith('<xs:schema')) def test_xml_resource_from_wrong_type(self): self.assertRaises(TypeError, XMLResource, [b'<UNSUPPORTED_DATA_TYPE/>']) def test_xml_resource_namespace(self): resource = XMLResource(self.vh_xml_file) self.assertEqual(resource.namespace, 'http://example.com/vehicles') resource = XMLResource(self.vh_xsd_file) self.assertEqual(resource.namespace, 'http://www.w3.org/2001/XMLSchema') resource = XMLResource(self.col_xml_file) self.assertEqual(resource.namespace, 'http://example.com/ns/collection') self.assertEqual(XMLResource('<A/>').namespace, '') def test_xml_resource_access(self): resource = XMLResource(self.vh_xml_file) base_url = resource.base_url XMLResource(self.vh_xml_file, allow='local') XMLResource(self.vh_xml_file, base_url=os.path.dirname(self.vh_xml_file), allow='sandbox') with self.assertRaises(XMLSchemaResourceError) as ctx: XMLResource(self.vh_xml_file, allow='remote') self.assertTrue( str(ctx.exception).startswith("block access to local resource")) with self.assertRaises(XMLSchemaResourceError) as ctx: XMLResource("https://xmlschema.test/vehicles.xsd", allow='local') self.assertEqual( str(ctx.exception), "block access to remote resource https://xmlschema.test/vehicles.xsd" ) with self.assertRaises(XMLSchemaResourceError) as ctx: XMLResource("https://xmlschema.test/vehicles.xsd", allow='sandbox') self.assertEqual( str(ctx.exception), "block access to files out of sandbox requires 'base_url' to be set" ) with self.assertRaises(XMLSchemaResourceError) as ctx: XMLResource("/tmp/vehicles.xsd", allow='sandbox') self.assertEqual( str(ctx.exception), 
"block access to files out of sandbox requires 'base_url' to be set", ) source = "/tmp/vehicles.xsd" with self.assertRaises(XMLSchemaResourceError) as ctx: XMLResource(source, base_url=base_url, allow='sandbox') self.assertEqual( str(ctx.exception), "block access to out of sandbox file {}".format( normalize_url(source)), ) with self.assertRaises(TypeError) as ctx: XMLResource("https://xmlschema.test/vehicles.xsd", allow=None) self.assertEqual( str(ctx.exception), "invalid type <class 'NoneType'> for the attribute 'allow'") with self.assertRaises(ValueError) as ctx: XMLResource("https://xmlschema.test/vehicles.xsd", allow='any') self.assertEqual(str(ctx.exception), "'allow' attribute: 'any' is not a security mode") def test_xml_resource_defuse(self): resource = XMLResource(self.vh_xml_file, defuse='never') self.assertEqual(resource.defuse, 'never') self.assertRaises(ValueError, XMLResource, self.vh_xml_file, defuse='all') self.assertRaises(TypeError, XMLResource, self.vh_xml_file, defuse=None) self.assertIsInstance(resource.root, etree_element) resource = XMLResource(self.vh_xml_file, defuse='always') self.assertIsInstance(resource.root, py_etree_element) xml_file = casepath('resources/with_entity.xml') self.assertIsInstance(XMLResource(xml_file), XMLResource) self.assertRaises(PyElementTree.ParseError, XMLResource, xml_file, defuse='always') xml_file = casepath('resources/unused_external_entity.xml') self.assertIsInstance(XMLResource(xml_file), XMLResource) self.assertRaises(PyElementTree.ParseError, XMLResource, xml_file, defuse='always') xml_file = casepath('resources/external_entity.xml') self.assertIsInstance(XMLResource(xml_file), XMLResource) self.assertRaises(PyElementTree.ParseError, XMLResource, xml_file, defuse='always') def test_xml_resource_timeout(self): resource = XMLResource(self.vh_xml_file, timeout=30) self.assertEqual(resource.timeout, 30) self.assertRaises(TypeError, XMLResource, self.vh_xml_file, timeout='100') self.assertRaises(ValueError, 
XMLResource, self.vh_xml_file, timeout=0)

    # NOTE(review): the line above is the tail of an assertion whose start
    # lies before this chunk; left untouched.

    def test_xml_resource_is_lazy(self):
        # Default construction is lazy; lazy=False disables it.
        resource = XMLResource(self.vh_xml_file)
        self.assertTrue(resource.is_lazy())
        resource = XMLResource(self.vh_xml_file, lazy=False)
        self.assertFalse(resource.is_lazy())

    def test_xml_resource_is_loaded(self):
        # A resource is loaded only after an explicit load() call.
        resource = XMLResource(self.vh_xml_file)
        self.assertFalse(resource.is_loaded())
        resource.load()
        self.assertTrue(resource.is_loaded())

    def test_xml_resource_parse(self):
        # parse() must work under both the default and 'always' defuse modes.
        resource = XMLResource(self.vh_xml_file)
        self.assertEqual(resource.defuse, 'remote')
        xml_document = resource.parse(self.col_xml_file)
        self.assertTrue(is_etree_element(xml_document.getroot()))
        resource.defuse = 'always'
        xml_document = resource.parse(self.col_xml_file)
        self.assertTrue(is_etree_element(xml_document.getroot()))

    def test_xml_resource_iterparse(self):
        resource = XMLResource(self.vh_xml_file)
        self.assertEqual(resource.defuse, 'remote')
        for _, elem in resource.iterparse(self.col_xml_file, events=('end',)):
            self.assertTrue(is_etree_element(elem))
        resource.defuse = 'always'
        for _, elem in resource.iterparse(self.col_xml_file, events=('end',)):
            self.assertTrue(is_etree_element(elem))

    def test_xml_resource_fromstring(self):
        resource = XMLResource(self.vh_xml_file)
        self.assertEqual(resource.defuse, 'remote')
        self.assertEqual(resource.fromstring('<root/>').tag, 'root')
        resource.defuse = 'always'
        self.assertEqual(resource.fromstring('<root/>').tag, 'root')

    def test_xml_resource_tostring(self):
        resource = XMLResource(self.vh_xml_file)
        self.assertTrue(resource.tostring().startswith('<vh:vehicles'))

    def test_xml_resource_copy(self):
        # copy() accepts keyword overrides for defuse/timeout/lazy, and a
        # plain copy shares the loaded text.
        resource = XMLResource(self.vh_xml_file)
        resource2 = resource.copy(defuse='never')
        self.assertEqual(resource2.defuse, 'never')
        resource2 = resource.copy(timeout=30)
        self.assertEqual(resource2.timeout, 30)
        resource2 = resource.copy(lazy=False)
        self.assertFalse(resource2.is_lazy())
        self.assertIsNone(resource2.text)
        self.assertIsNone(resource.text)
        resource.load()
        self.assertIsNotNone(resource.text)
        resource2 = resource.copy()
        self.assertEqual(resource.text, resource2.text)

    def test_xml_resource_open(self):
        # open() returns a new file object for URL sources, the same object
        # for file-object sources, and raises for in-memory XML text.
        resource = XMLResource(self.vh_xml_file)
        xml_file = resource.open()
        self.assertIsNot(xml_file, resource.source)
        data = xml_file.read().decode('utf-8')
        self.assertTrue(data.startswith('<?xml '))
        xml_file.close()
        resource = XMLResource('<A/>')
        self.assertRaises(XMLSchemaResourceError, resource.open)
        resource = XMLResource(source=open(self.vh_xml_file))
        xml_file = resource.open()
        self.assertIs(xml_file, resource.source)
        xml_file.close()

    def test_xml_resource_seek(self):
        # seek() is a no-op (None) for URL sources, delegates for file objects.
        resource = XMLResource(self.vh_xml_file)
        self.assertIsNone(resource.seek(0))
        self.assertIsNone(resource.seek(1))
        xml_file = open(self.vh_xml_file)
        resource = XMLResource(source=xml_file)
        self.assertEqual(resource.seek(0), 0)
        self.assertEqual(resource.seek(1), 1)
        xml_file.close()

    def test_xml_resource_close(self):
        # close() on a URL source still allows re-opening; closing a resource
        # built from an already-closed file object must fail on open().
        resource = XMLResource(self.vh_xml_file)
        resource.close()
        xml_file = resource.open()
        self.assertTrue(callable(xml_file.read))
        with open(self.vh_xml_file) as xml_file:
            resource = XMLResource(source=xml_file)
        resource.close()
        with self.assertRaises(ValueError):
            resource.open()

    def test_xml_resource_iter(self):
        # Lazy and non-lazy iteration visit the same number of elements but
        # in a different order (root last when lazy).
        resource = XMLResource(self.schema_class.meta_schema.source.url, lazy=False)
        self.assertFalse(resource.is_lazy())
        lazy_resource = XMLResource(self.schema_class.meta_schema.source.url)
        self.assertTrue(lazy_resource.is_lazy())

        tags = [x.tag for x in resource.iter()]
        self.assertEqual(len(tags), 1390)
        self.assertEqual(tags[0], '{%s}schema' % XSD_NAMESPACE)

        lazy_tags = [x.tag for x in lazy_resource.iter()]
        self.assertEqual(len(lazy_tags), 1390)
        self.assertEqual(lazy_tags[-1], '{%s}schema' % XSD_NAMESPACE)
        self.assertNotEqual(tags, lazy_tags)

        tags = [x.tag for x in resource.iter('{%s}complexType' % XSD_NAMESPACE)]
        self.assertEqual(len(tags), 56)
        self.assertEqual(tags[0], '{%s}complexType' % XSD_NAMESPACE)
        self.assertListEqual(tags, [
            x.tag for x in lazy_resource.iter('{%s}complexType' % XSD_NAMESPACE)
        ])

    def test_xml_resource_iter_subtrees(self):
        namespaces = {'xs': XSD_NAMESPACE}
        resource = XMLResource(self.schema_class.meta_schema.source.url, lazy=False)
        self.assertFalse(resource.is_lazy())
        lazy_resource = XMLResource(self.schema_class.meta_schema.source.url)
        self.assertTrue(lazy_resource.is_lazy())

        # Note: Element change with lazy resource so compare only tags
        tags = [x.tag for x in resource.iter_subtrees()]
        self.assertEqual(len(tags), 1)
        self.assertEqual(tags[0], '{%s}schema' % XSD_NAMESPACE)

        lazy_tags = [x.tag for x in lazy_resource.iter_subtrees()]
        self.assertListEqual(tags, lazy_tags)

        lazy_tags = [x.tag for x in lazy_resource.iter_subtrees(lazy_mode=2)]
        self.assertListEqual(tags, lazy_tags)

        lazy_tags = [x.tag for x in lazy_resource.iter_subtrees(lazy_mode=3)]
        self.assertEqual(len(lazy_tags), 156)

        lazy_tags = [x.tag for x in lazy_resource.iter_subtrees(lazy_mode=4)]
        self.assertEqual(len(lazy_tags), 157)
        self.assertEqual(tags[0], lazy_tags[-1])

        lazy_tags = [x.tag for x in lazy_resource.iter_subtrees(lazy_mode=5)]
        self.assertEqual(len(lazy_tags), 158)
        self.assertEqual(tags[0], lazy_tags[0])
        self.assertEqual(tags[0], lazy_tags[-1])

        tags = [x.tag for x in resource.iter_subtrees(path='.')]
        self.assertEqual(len(tags), 1)
        self.assertEqual(tags[0], '{%s}schema' % XSD_NAMESPACE)

        lazy_tags = [x.tag for x in lazy_resource.iter_subtrees(path='.')]
        self.assertListEqual(tags, lazy_tags)

        tags = [x.tag for x in resource.iter_subtrees(path='*')]
        self.assertEqual(len(tags), 156)
        self.assertEqual(tags[0], '{%s}annotation' % XSD_NAMESPACE)

        lazy_tags = [x.tag for x in lazy_resource.iter_subtrees(path='*')]
        self.assertListEqual(tags, lazy_tags)

        tags = [x.tag for x in resource.iter_subtrees('xs:complexType', namespaces)]
        self.assertEqual(len(tags), 35)
        self.assertTrue(all(t == '{%s}complexType' % XSD_NAMESPACE for t in tags))

        lazy_tags = [
            x.tag for x in lazy_resource.iter_subtrees('xs:complexType', namespaces)
        ]
        self.assertListEqual(tags, lazy_tags)

        tags = [
            x.tag for x in resource.iter_subtrees('. /. / xs:complexType', namespaces)
        ]
        self.assertEqual(len(tags), 35)
        self.assertTrue(all(t == '{%s}complexType' % XSD_NAMESPACE for t in tags))

        lazy_tags = [
            x.tag
            for x in lazy_resource.iter_subtrees('. /. / xs:complexType', namespaces)
        ]
        self.assertListEqual(tags, lazy_tags)

    def test_xml_resource_get_namespaces(self):
        # File-object sources have no URL and must not be closed by the call.
        with open(self.vh_xml_file) as schema_file:
            resource = XMLResource(schema_file)
            self.assertIsNone(resource.url)
            self.assertEqual(set(resource.get_namespaces().keys()), {'vh', 'xsi'})
            self.assertFalse(schema_file.closed)

        with open(self.vh_xsd_file) as schema_file:
            resource = XMLResource(schema_file)
            self.assertIsNone(resource.url)
            self.assertEqual(set(resource.get_namespaces().keys()), {'xs', 'vh'})
            self.assertFalse(schema_file.closed)

        resource = XMLResource(self.col_xml_file)
        self.assertEqual(resource.url, normalize_url(self.col_xml_file))
        self.assertEqual(set(resource.get_namespaces().keys()), {'col', 'xsi'})

        resource = XMLResource(self.col_xsd_file)
        self.assertEqual(resource.url, normalize_url(self.col_xsd_file))
        self.assertEqual(set(resource.get_namespaces().keys()), {'', 'xs'})

        resource = XMLResource("""<?xml version="1.0" ?>
            <root xmlns="tns1">
                <tns:elem1 xmlns:tns="tns1" xmlns="unknown"/>
            </root>""", lazy=False)
        self.assertEqual(set(resource.get_namespaces().keys()), {'', 'tns', 'default'})
        resource._lazy = True
        self.assertEqual(resource.get_namespaces().keys(), {''})

        resource = XMLResource("""<?xml version="1.0" ?>
            <root xmlns:tns="tns1">
                <tns:elem1 xmlns:tns="tns1" xmlns="unknown"/>
            </root>""", lazy=False)
        self.assertEqual(set(resource.get_namespaces().keys()), {'default', 'tns'})
        self.assertEqual(resource.get_namespaces(root_only=True).keys(), {'tns'})
        resource._lazy = True
        self.assertEqual(resource.get_namespaces().keys(), {'tns'})

        resource = XMLResource("""<?xml version="1.0" ?>
            <root xmlns:tns="tns1">
                <tns:elem1 xmlns:tns="tns3" xmlns="unknown"/>
            </root>""", lazy=False)
        self.assertEqual(set(resource.get_namespaces().keys()),
                         {'default', 'tns', 'tns0'})
        resource._lazy = True
        self.assertEqual(resource.get_namespaces().keys(), {'tns'})

    def test_xml_resource_get_locations(self):
        resource = XMLResource(self.col_xml_file)
        self.check_url(resource.url, normalize_url(self.col_xml_file))
        locations = resource.get_locations([('ns', 'other.xsd')])
        self.assertEqual(len(locations), 2)
        self.check_url(locations[0][1], os.path.join(self.col_dir, 'other.xsd'))
        self.check_url(locations[1][1], normalize_url(self.col_xsd_file))

    @unittest.skipIf(SKIP_REMOTE_TESTS or platform.system() == 'Windows',
                     "Remote networks are not accessible or avoid SSL "
                     "verification error on Windows.")
    def test_remote_schemas_loading(self):
        col_schema = self.schema_class(
            "https://raw.githubusercontent.com/brunato/xmlschema/master/"
            "tests/test_cases/examples/collection/collection.xsd")
        self.assertTrue(isinstance(col_schema, self.schema_class))
        vh_schema = self.schema_class(
            "https://raw.githubusercontent.com/brunato/xmlschema/master/"
            "tests/test_cases/examples/vehicles/vehicles.xsd")
        self.assertTrue(isinstance(vh_schema, self.schema_class))

    def test_schema_defuse(self):
        # defuse='always' must apply to the schema and all imported schemas.
        vh_schema = self.schema_class(self.vh_xsd_file, defuse='always')
        self.assertIsInstance(vh_schema.root, etree_element)
        for schema in vh_schema.maps.iter_schemas():
            self.assertIsInstance(schema.root, etree_element)

    def test_schema_resource_access(self):
        # Exercises the allow= access policies: all/remote/local/sandbox.
        vh_schema = self.schema_class(self.vh_xsd_file, allow='sandbox')
        self.assertTrue(isinstance(vh_schema, self.schema_class))

        xsd_source = """
        <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
            xmlns:vh="http://example.com/vehicles">
            <xs:import namespace="http://example.com/vehicles" schemaLocation="{}"/>
        </xs:schema>""".format(self.vh_xsd_file)

        schema = self.schema_class(xsd_source, allow='all')
        self.assertTrue(isinstance(schema, self.schema_class))
        self.assertIn("http://example.com/vehicles", schema.maps.namespaces)
        self.assertEqual(
            len(schema.maps.namespaces["http://example.com/vehicles"]), 4)

        with warnings.catch_warnings(record=True) as ctx:
            warnings.simplefilter("always")
            self.schema_class(xsd_source, allow='remote')
            self.assertEqual(len(ctx), 1, "Expected one import warning")
            self.assertIn("block access to local resource", str(ctx[0].message))

        schema = self.schema_class(xsd_source, allow='local')
        self.assertTrue(isinstance(schema, self.schema_class))
        self.assertIn("http://example.com/vehicles", schema.maps.namespaces)
        self.assertEqual(
            len(schema.maps.namespaces["http://example.com/vehicles"]), 4)

        with self.assertRaises(XMLSchemaResourceError) as ctx:
            self.schema_class(xsd_source, allow='sandbox')
        self.assertIn("block access to files out of sandbox", str(ctx.exception))

        schema = self.schema_class(xsd_source,
                                   base_url=os.path.dirname(self.vh_xsd_file),
                                   allow='all')
        self.assertTrue(isinstance(schema, self.schema_class))
        self.assertIn("http://example.com/vehicles", schema.maps.namespaces)
        self.assertEqual(
            len(schema.maps.namespaces["http://example.com/vehicles"]), 4)

        with warnings.catch_warnings(record=True) as ctx:
            warnings.simplefilter("always")
            self.schema_class(xsd_source, base_url='/tmp', allow='sandbox')
            self.assertEqual(len(ctx), 1, "Expected one import warning")
            self.assertIn("block access to out of sandbox", str(ctx[0].message))

    def test_fid_with_name_attr(self):
        """XMLResource gets correct data when passed a file like object
        with a name attribute that isn't on disk.

        These file descriptors appear when working with the contents from a
        zip using the zipfile module and with Django files in some instances.
        """
        class FileProxy(object):
            # Wraps a real file object but reports a bogus .name.
            def __init__(self, fid, fake_name):
                self._fid = fid
                self.name = fake_name

            def __getattr__(self, attr):
                try:
                    return self.__dict__[attr]
                except (KeyError, AttributeError):
                    return getattr(self.__dict__["_fid"], attr)

        with open(self.vh_xml_file) as xml_file:
            resource = XMLResource(
                FileProxy(xml_file, fake_name="not__on____disk.xml"))
            self.assertIsNone(resource.url)
            self.assertEqual(set(resource.get_namespaces().keys()), {'vh', 'xsi'})
            self.assertFalse(xml_file.closed)
def pytest_sessionfinish(session, exitstatus):
    """pytest hook: export collected GPU benchmark results to an ASV database.

    Reads session options set by the benchmark plugin, gathers machine/commit
    metadata (command-line metadata takes precedence over detected values),
    and writes one BenchmarkResult per stat type and custom metric per bench.
    """
    gpuBenchSess = session.config._gpubenchmarksession
    config = session.config
    asvOutputDir = config.getoption("benchmark_asv_output_dir")
    asvMetadata = config.getoption("benchmark_asv_metadata")
    gpuDeviceNums = config.getoption("benchmark_gpu_device")
    # Only export when an output dir was requested and benchmarks actually ran.
    if asvOutputDir and gpuBenchSess.benchmarks:

        # FIXME: do not lookup commit metadata if already specified on the
        # command line.
        (commitHash, commitTime) = asvdbUtils.getCommitInfo()
        (commitRepo, commitBranch) = asvdbUtils.getRepoInfo()

        # FIXME: do not make pynvml calls if all the metadata provided by
        # pynvml was specified on the command line.
        smi.nvmlInit()
        # only supporting 1 GPU
        # FIXME: see if it's possible to auto detect gpu device number
        # instead of manually passing a value
        gpuDeviceHandle = smi.nvmlDeviceGetHandleByIndex(gpuDeviceNums[0])

        # Detected defaults, each overridable via --benchmark-asv-metadata.
        uname = platform.uname()
        machineName = asvMetadata.get("machineName", uname.machine)
        cpuType = asvMetadata.get("cpuType", uname.processor)
        arch = asvMetadata.get("arch", uname.machine)
        # Drop the patchlevel component: report "major.minor" only.
        pythonVer = asvMetadata.get(
            "pythonVer", ".".join(platform.python_version_tuple()[:-1]))
        cudaVer = asvMetadata.get("cudaVer", _getCudaVersion() or "unknown")
        osType = asvMetadata.get(
            "osType", _getOSName() or platform.linux_distribution()[0])
        gpuType = asvMetadata.get(
            "gpuType", smi.nvmlDeviceGetName(gpuDeviceHandle).decode())
        ram = asvMetadata.get("ram", "%d" % psutil.virtual_memory().total)
        gpuRam = asvMetadata.get(
            "gpuRam", "%d" % smi.nvmlDeviceGetMemoryInfo(gpuDeviceHandle).total)
        commitHash = asvMetadata.get("commitHash", commitHash)
        commitTime = asvMetadata.get("commitTime", commitTime)
        commitRepo = asvMetadata.get("commitRepo", commitRepo)
        commitBranch = asvMetadata.get("commitBranch", commitBranch)
        requirements = asvMetadata.get("requirements", "{}")

        # Maps stat type -> funcName suffix and stat type -> ASV unit string.
        suffixDict = dict(
            gpu_util="gpuutil",
            gpu_mem="gpumem",
            gpu_leaked_mem="gpu_leaked_mem",
            mean="time",
        )
        unitsDict = dict(
            gpu_util="percent",
            gpu_mem="bytes",
            gpu_leaked_mem="bytes",
            mean="seconds",
        )

        db = ASVDb(asvOutputDir, commitRepo, [commitBranch])
        bInfo = BenchmarkInfo(machineName=machineName,
                              cudaVer=cudaVer,
                              osType=osType,
                              pythonVer=pythonVer,
                              commitHash=commitHash,
                              commitTime=commitTime,
                              branch=commitBranch,
                              gpuType=gpuType,
                              cpuType=cpuType,
                              arch=arch,
                              ram=ram,
                              gpuRam=gpuRam,
                              requirements=requirements)

        for bench in gpuBenchSess.benchmarks:
            benchName = _getHierBenchNameFromFullname(bench.fullname)
            # build the final params dict by extracting them from the
            # bench.params dictionary. Not all benchmarks are parameterized
            params = {}
            bench_params = bench.params.items() if bench.params is not None else []
            for (paramName, paramVal) in bench_params:
                # If the params are coming from a fixture, handle them
                # differently since they will (should be) stored in a special
                # variable accessible with the name of the fixture.
                #
                # NOTE: "fixture_param_names" must be manually set by the
                # benchmark author/user using the "request" fixture! (see below)
                #
                # @pytest.fixture(params=[1,2,3])
                # def someFixture(request):
                #     request.keywords["fixture_param_names"] = ["the_param_name"]
                if hasattr(bench, "fixture_param_names") and \
                   (bench.fixture_param_names is not None) and \
                   (paramName in bench.fixture_param_names):
                    fixtureName = paramName
                    paramNames = _ensureListLike(
                        bench.fixture_param_names[fixtureName])
                    paramValues = _ensureListLike(paramVal)
                    for (pname, pval) in zip(paramNames, paramValues):
                        params[pname] = pval
                # otherwise, a benchmark/test will have params added to the
                # bench.params dict as a standard key:value (paramName:paramVal)
                else:
                    params[paramName] = paramVal

            # One result per standard stat that the bench actually recorded.
            resultList = []
            for statType in ["mean", "gpu_mem", "gpu_leaked_mem", "gpu_util"]:
                bn = "%s_%s" % (benchName, suffixDict[statType])
                val = getattr(bench.stats, statType, None)
                if val is not None:
                    bResult = BenchmarkResult(
                        funcName=bn,
                        argNameValuePairs=list(params.items()),
                        result=val)
                    bResult.unit = unitsDict[statType]
                    resultList.append(bResult)

            # If there were any custom metrics, add each of those as well as an
            # individual result to the same bInfo instance.
            for customMetricName in bench.stats.getCustomMetricNames():
                (result, unitString) = bench.stats.getCustomMetric(customMetricName)
                bn = "%s_%s" % (benchName, customMetricName)
                bResult = BenchmarkResult(
                    funcName=bn,
                    argNameValuePairs=list(params.items()),
                    result=result)
                bResult.unit = unitString
                resultList.append(bResult)

            db.addResults(bInfo, resultList)
import asyncio import functools import ipaddress import os import platform import socket from collections import OrderedDict from random import SystemRandom import asyncssh from .constants import DEFAULT_LANG # Provide globals to test if we're on various Python versions python344 = platform.python_version_tuple() >= ('3', '4', '4') python35 = platform.python_version_tuple() >= ('3', '5', '0') python352 = platform.python_version_tuple() >= ('3', '5', '2') # Define a version of randrange which is based on SystemRandom(), so that # we get back numbers suitable for cryptographic use. _random = SystemRandom() randrange = _random.randrange # Avoid deprecation warning for asyncio.async() if python344: create_task = asyncio.ensure_future else: # pragma: no cover create_task = asyncio. async # pylint: disable=no-member
initializer, initargs, maxtasksperchild, wrap_exception, finalizer, finalargs, ), ) w.name = w.name.replace("Process", "PoolWorker") w.start() pool.append(w) util.debug("added worker") LoggingDaemonlessPool = LoggingDaemonlessPool38 if tuple(map(int, platform.python_version_tuple()[:2])) < (3, 8): # pragma: no cover LoggingDaemonlessPool = LoggingDaemonlessPool37 import platform import multiprocessing.pool from multiprocessing import util try: from multiprocessing.pool import MaybeEncodingError except: # pragma: no cover # Python 2.7.4 introduced this class. If we're on Python 2.7.0 to 2.7.3 # then we'll have to define it ourselves. :-/ class MaybeEncodingError(Exception): """Wraps possible unpickleable errors, so they can be safely sent through the socket."""
def environment_settings():
    """Return a list of (name, value) pairs describing the agent's runtime
    environment: agent, system, Python, extension, dispatcher and loaded
    module information.
    """
    env = []

    # Agent information.
    env.append(('Agent Version', '.'.join(map(str, newrelic.version_info))))

    if 'NEW_RELIC_ADMIN_COMMAND' in os.environ:
        env.append(('Admin Command', os.environ['NEW_RELIC_ADMIN_COMMAND']))

    # System information.
    env.append(('Arch', platform.machine()))
    env.append(('OS', platform.system()))
    env.append(('OS version', platform.release()))
    env.append(('Total Physical Memory (MB)', total_physical_memory()))
    env.append(('Logical Processors', logical_processor_count()))

    physical_processor_packages, physical_cores = physical_processor_count()

    # Report this attribute only if it has a valid value.
    if physical_processor_packages:
        env.append(
            ('Physical Processor Packages', physical_processor_packages))

    # Report this attribute only if it has a valid value.
    if physical_cores:
        env.append(('Physical Cores', physical_cores))

    # Python information.
    env.append(('Python Program Name', sys.argv[0]))
    env.append(('Python Executable', sys.executable))
    env.append(('Python Home', os.environ.get('PYTHONHOME', '')))
    env.append(('Python Path', os.environ.get('PYTHONPATH', '')))
    env.append(('Python Prefix', sys.prefix))
    env.append(('Python Exec Prefix', sys.exec_prefix))
    env.append(('Python Runtime', '.'.join(platform.python_version_tuple())))
    env.append(('Python Implementation', platform.python_implementation()))
    env.append(('Python Version', sys.version))
    env.append(('Python Platform', sys.platform))
    env.append(('Python Max Unicode', sys.maxunicode))

    # Extensions information.
    extensions = []

    if 'newrelic.core._thread_utilization' in sys.modules:
        extensions.append('newrelic.core._thread_utilization')

    env.append(('Compiled Extensions', ', '.join(extensions)))

    # Dispatcher information.  Each check runs only if no earlier check
    # already identified a dispatcher.
    dispatcher = []

    if not dispatcher and 'mod_wsgi' in sys.modules:
        mod_wsgi = sys.modules['mod_wsgi']
        if hasattr(mod_wsgi, 'process_group'):
            if mod_wsgi.process_group == '':
                dispatcher.append(('Dispatcher', 'Apache/mod_wsgi (embedded)'))
            else:
                dispatcher.append(('Dispatcher', 'Apache/mod_wsgi (daemon)'))
                env.append(
                    ('Apache/mod_wsgi Process Group', mod_wsgi.process_group))
        else:
            dispatcher.append(('Dispatcher', 'Apache/mod_wsgi'))
        if hasattr(mod_wsgi, 'version'):
            dispatcher.append(('Dispatcher Version', str(mod_wsgi.version)))
        if hasattr(mod_wsgi, 'application_group'):
            env.append(('Apache/mod_wsgi Application Group',
                        mod_wsgi.application_group))

    if not dispatcher and 'uwsgi' in sys.modules:
        dispatcher.append(('Dispatcher', 'uWSGI'))
        uwsgi = sys.modules['uwsgi']
        if hasattr(uwsgi, 'version'):
            dispatcher.append(('Dispatcher Version', uwsgi.version))

    if not dispatcher and 'flup.server.fcgi' in sys.modules:
        dispatcher.append(('Dispatcher', 'flup/fastcgi (threaded)'))

    if not dispatcher and 'flup.server.fcgi_fork' in sys.modules:
        dispatcher.append(('Dispatcher', 'flup/fastcgi (prefork)'))

    if not dispatcher and 'flup.server.scgi' in sys.modules:
        dispatcher.append(('Dispatcher', 'flup/scgi (threaded)'))

    if not dispatcher and 'flup.server.scgi_fork' in sys.modules:
        dispatcher.append(('Dispatcher', 'flup/scgi (prefork)'))

    if not dispatcher and 'flup.server.ajp' in sys.modules:
        dispatcher.append(('Dispatcher', 'flup/ajp (threaded)'))

    if not dispatcher and 'flup.server.ajp_fork' in sys.modules:
        dispatcher.append(('Dispatcher', 'flup/ajp (forking)'))

    if not dispatcher and 'flup.server.cgi' in sys.modules:
        dispatcher.append(('Dispatcher', 'flup/cgi'))

    if not dispatcher and 'gunicorn' in sys.modules:
        if 'gunicorn.workers.ggevent' in sys.modules:
            dispatcher.append(('Dispatcher', 'gunicorn (gevent)'))
        elif 'gunicorn.workers.geventlet' in sys.modules:
            dispatcher.append(('Dispatcher', 'gunicorn (eventlet)'))
        else:
            dispatcher.append(('Dispatcher', 'gunicorn'))
        gunicorn = sys.modules['gunicorn']
        if hasattr(gunicorn, '__version__'):
            dispatcher.append(('Dispatcher Version', gunicorn.__version__))

    if not dispatcher and 'tornado' in sys.modules:
        dispatcher.append(('Dispatcher', 'tornado'))
        tornado = sys.modules['tornado']
        if hasattr(tornado, 'version_info'):
            dispatcher.append(
                ('Dispatcher Version', str(tornado.version_info)))

    env.extend(dispatcher)

    # Module information.
    plugins = []

    # Using six to create a snapshot of sys.modules can occasionally
    # fail in a rare case when modules are imported in parallel by different
    # threads. This is because list(six.iteritems(sys.modules)) results in
    # list(iter(sys.modules.iteritems())), which means sys.modules could change
    # between the time when the iterable is handed over from the iter() to
    # list().
    #
    # TL;DR: Do NOT use six module for the following iteration.

    for name, module in list(sys.modules.items()):
        if name.startswith('newrelic.hooks.'):
            plugins.append(name)
        elif name.find('.') == -1 and hasattr(module, '__file__'):
            # XXX This is disabled as it can cause notable overhead in
            # pathalogical cases. Will be replaced with a new system
            # where have a whitelist of packages we really want version
            # information for and will work out on case by case basis
            # how to extract that from the modules themselves.

            # try:
            #     if 'pkg_resources' in sys.modules:
            #         version = pkg_resources.get_distribution(name).version
            #         if version:
            #             name = '%s (%s)' % (name, version)
            # except Exception:
            #     pass

            plugins.append(name)

    env.append(('Plugin List', plugins))

    return env
def get_python_cmd(): major_version = platform.python_version_tuple()[0] return "python" if int(major_version) <= 2 else "python{0}".format( major_version)
async def _create_endpoint(
    cls,
    loop: asyncio.BaseEventLoop,
    prot: SOMEIPDatagramProtocol,
    family: socket.AddressFamily,
    local_addr: str,
    port: int,
    multicast_addr: typing.Optional[str] = None,
    multicast_interface: typing.Optional[str] = None,
    ttl: int = 1,
):
    """Create a UDP datagram endpoint for `prot`, optionally joining a
    multicast group.

    Returns the created transport.  Raises ValueError for unsupported
    address families (and for IPv6 multicast without an interface name),
    RuntimeError when the local address cannot be resolved (Windows path),
    and NotImplementedError on unsupported platforms.
    """
    import sys  # local import; used only for the version check below

    if family not in (socket.AF_INET, socket.AF_INET6):
        # BUGFIX: this message was a plain string literal with "{family!r}"
        # in it -- the f-prefix was missing, so the value never appeared.
        raise ValueError(f"only IPv4 and IPv6 supported, got {family!r}")

    if os.name == "posix":  # pragma: nocover
        # multicast binding:
        # - BSD: will only receive packets destined for multicast addr,
        #   but will send with address from bind()
        # - Linux: will receive all multicast traffic destined for this
        #   port, can be filtered using bind()
        bind_addr: typing.Optional[str] = local_addr
        if multicast_addr:
            bind_addr = None
            if platform.system() == "Linux":  # pragma: nocover
                if family == socket.AF_INET or "%" in multicast_addr:
                    bind_addr = multicast_addr
                else:
                    bind_addr = f"{multicast_addr}%{multicast_interface}"
        # wrong type in asyncio typeshed, should be optional
        bind_addr = typing.cast(str, bind_addr)

        trsp, _ = await loop.create_datagram_endpoint(
            lambda: DatagramProtocolAdapter(
                prot, is_multicast=bool(multicast_addr)
            ),
            local_addr=(bind_addr, port),
            reuse_port=True,
            family=family,
            proto=socket.IPPROTO_UDP,
            flags=socket.AI_PASSIVE,
        )
    elif platform.system() == "Windows":  # pragma: nocover
        sock = socket.socket(
            family=family, type=socket.SOCK_DGRAM, proto=socket.IPPROTO_UDP
        )

        # BUGFIX: this check compared platform.python_version_tuple(), a
        # tuple of *strings*, against ('3', '8', '4'); lexicographic
        # ordering ('10' < '8') made the warning fire spuriously on
        # Python 3.10+.  sys.version_info compares numerically.
        if (
            family == socket.AF_INET6
            and sys.version_info < (3, 8, 4)
            and isinstance(loop, getattr(asyncio, "ProactorEventLoop", ()))
        ):
            prot.log.warning(
                "ProactorEventLoop has issues with ipv6 datagram sockets!"
                " https://bugs.python.org/issue39148. Update to Python>=3.8.4, or"
                " workaround with asyncio.set_event_loop_policy("
                "asyncio.WindowsSelectorEventLoopPolicy())",
            )

        # python disallowed SO_REUSEADDR on create_datagram_endpoint.
        # https://bugs.python.org/issue37228
        # Windows doesnt have SO_REUSEPORT and the problem apparently does
        # not exist for multicast, so we need to set SO_REUSEADDR on the
        # socket manually
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        addrinfos = await loop.getaddrinfo(
            local_addr,
            port,
            family=sock.family,
            type=sock.type,
            proto=sock.proto,
            flags=socket.AI_PASSIVE,
        )
        if not addrinfos:
            raise RuntimeError(
                f"could not resolve local_addr={local_addr!r} port={port!r}"
            )

        ai = addrinfos[0]

        sock.bind(ai[4])
        trsp, _ = await loop.create_datagram_endpoint(
            lambda: DatagramProtocolAdapter(
                prot, is_multicast=bool(multicast_addr)
            ),
            sock=sock,
        )
    else:  # pragma: nocover
        raise NotImplementedError(
            f"unsupported platform {os.name} {platform.system()}"
        )

    sock = trsp.get_extra_info("socket")
    try:
        if family == socket.AF_INET:
            packed_local_addr = pack_addr_v4(local_addr)
            if multicast_addr:
                packed_mcast_addr = pack_addr_v4(multicast_addr)
                mreq = struct.pack("=4s4s", packed_mcast_addr, packed_local_addr)
                sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
                sock.setsockopt(
                    socket.IPPROTO_IP, socket.IP_MULTICAST_IF, packed_local_addr
                )
            sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl)
            # we want other implementations on the same host to receive our
            # messages
            sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
        else:  # AF_INET6
            if multicast_interface is None:
                raise ValueError("ipv6 requires interface name")
            ifindex = socket.if_nametoindex(multicast_interface)
            if multicast_addr:
                packed_mcast_addr = pack_addr_v6(multicast_addr)
                mreq = struct.pack("=16sl", packed_mcast_addr, ifindex)
                sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, mreq)
                sock.setsockopt(
                    socket.IPPROTO_IPV6,
                    socket.IPV6_MULTICAST_IF,
                    struct.pack("=i", ifindex),
                )
            sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_HOPS, ttl)
            # we want other implementations on the same host to receive our
            # messages
            sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, 1)
    except BaseException:
        # Never leak the transport if group-join / socket options fail.
        trsp.close()
        raise
    return trsp
'reltol': 2e-8, 'abstol': 2e-8, 'verbose': True } sol = ecos.solve(c, G, h, dims, **myopts) yield check_solution, sol['x'][0], 4 sol = ecos.solve(c, G, h, dims, A, b, **myopts) yield check_solution, sol['x'][0], 3 new_dims = {'q': [2], 'l': 0} sol = ecos.solve(c, G, h, new_dims, **myopts) yield check_solution, sol['x'][0], 2 if platform.python_version_tuple() < ('3', '0', '0'): def test_problems_with_longs(): new_dims = {'q': [], 'l': long(2)} myopts = { 'feastol': 2e-8, 'reltol': 2e-8, 'abstol': 2e-8 } sol = ecos.solve(c, G, h, new_dims, **myopts) yield check_solution, sol['x'][0], 4 sol = ecos.solve(c, G, h, new_dims, A, b, **myopts) yield check_solution, sol['x'][0], 3 new_dims = {'q': [long(2)], 'l': 0}
import ctypes import json import platform from threading import Lock mutex = Lock() # We need to do things slightly differently for Python 2 vs. 3 # ... because the way str/unicode have changed to bytes/str if platform.python_version_tuple()[0] == '2': # Using Python 2 bytes = str _PYTHON_3 = False else: # Assume using Python 3+ unicode = str _PYTHON_3 = True def _convert_to_charp(string): # Prepares function input for use in c-functions as char* if type(string) == unicode: return string.encode("UTF-8") elif type(string) == bytes: return string else: raise TypeError( "Expected unicode string values or ascii/bytes values. Got: %r" % type(string))
def num_py_major_minor_tuple(): return map(num_pyver, platform.python_version_tuple()[0:2])
""" A thin, practical wrapper around terminal capabilities in Python. http://pypi.python.org/pypi/blessed """ # std imports import platform as _platform if _platform.system() == 'Windows': from blessed.win_terminal import Terminal else: from blessed.terminal import Terminal if ('3', '0', '0') <= _platform.python_version_tuple() < ('3', '2', '2+'): # Good till 3.2.10 # Python 3.x < 3.2.3 has a bug in which tparm() erroneously takes a string. raise ImportError('Blessed needs Python 3.2.3 or greater for Python 3 ' 'support due to http://bugs.python.org/issue10570.') __all__ = ('Terminal', ) __version__ = '1.17.1'
def get_python_version(): """获取当前 python版本""" py_version = platform.python_version_tuple() py_version = int(py_version[0]) * 10 + int(py_version[1]) print('current python version:', int(py_version) * 0.1) return py_version
streamHandler = logging.StreamHandler() streamHandler.setFormatter(CustomFormatter(datefmt="%X")) loggingHandler = CustomMemoryHandler(600, target=streamHandler) ROOT_LOGGER.addHandler(loggingHandler) logging.captureWarnings(True) if sys.platform.startswith("win"): from asyncio import ProactorEventLoop loop = ProactorEventLoop() os.system("color") os.system("cls") else: os.system("clear") if platform.python_version_tuple() < ("3", "7", "3"): print("Please run this script with Python 3.7.3 or above." "\nExiting the script.") sys.exit(1) if config_file.exists(): config.read(config_file) resolve_env(config) try: resolve_env(config) except ValueError: print("Please make sure you have a proper config.ini in this directory " "or the required environment variables set." "\nExiting the script.") sys.exit(1)
#!/usr/bin/env python # -*- coding: utf-8 -*- import platform vers = platform.python_version_tuple() import os, sys here = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, here) if 2 >= int(vers[0]): from .py2.textree import * elif 3 <= int(vers[0]): from .py3.textree import * else: raise Exception( 'There is no source code corresponding to the specified Python version.' )
stick = blinkstick.find_first() # Create a listener that turns the leds off when the program terminates signal.signal(signal.SIGTERM, signal_handler) signal.signal(signal.SIGINT, signal_handler) _gamma = np.load(config.GAMMA_TABLE_PATH) """Gamma lookup table used for nonlinear brightness correction""" _prev_pixels = np.tile(253, (3, config.N_PIXELS)) """Pixel values that were most recently displayed on the LED strip""" _prev_rgb = np.tile(253, config.N_PIXELS) pixels = np.tile(1, (3, config.N_PIXELS)) """Pixel values for the LED strip""" _is_python_2 = int(platform.python_version_tuple()[0]) == 2 def _update_esp8266(): """Sends UDP packets to ESP8266 to update LED strip values The ESP8266 will receive and decode the packets to determine what values to display on the LED strip. The communication protocol supports LED strips with a maximum of 256 LEDs. The packet encoding scheme is: |i|r|g|b| where i (0 to 255): Index of LED to change (zero-based) r (0 to 255): Red value of LED g (0 to 255): Green value of LED
def is_python2(): return platform.python_version_tuple()[0] == '2'
def _codecov_submit():
    """Collect repository, platform and coverage metadata and upload the
    coverage report to codecov.io.

    Detects the CI environment (Travis, AppVeyor, CircleCI) to locate the
    repository root; bails out early when there is no git repo, the working
    tree is dirty, or codecov.json cannot be read.
    """
    if os.getenv('CI') == 'true' and os.getenv('TRAVIS') == 'true':
        # http://docs.travis-ci.com/user/environment-variables/#Default-Environment-Variables
        build_url = 'https://travis-ci.org/%s/jobs/%s' % (
            os.getenv('TRAVIS_REPO_SLUG'), os.getenv('TRAVIS_JOB_ID'))
        query = {
            'service': 'travis',
            'branch': os.getenv('TRAVIS_BRANCH'),
            'build': os.getenv('TRAVIS_JOB_NUMBER'),
            'pr': os.getenv('TRAVIS_PULL_REQUEST'),
            'job': os.getenv('TRAVIS_JOB_ID'),
            'tag': os.getenv('TRAVIS_TAG'),
            'slug': os.getenv('TRAVIS_REPO_SLUG'),
            'commit': os.getenv('TRAVIS_COMMIT'),
            'build_url': build_url,
        }
        root = os.getenv('TRAVIS_BUILD_DIR')
    elif os.getenv('CI') == 'True' and os.getenv('APPVEYOR') == 'True':
        # http://www.appveyor.com/docs/environment-variables
        build_url = 'https://ci.appveyor.com/project/%s/build/%s' % (os.getenv(
            'APPVEYOR_REPO_NAME'), os.getenv('APPVEYOR_BUILD_VERSION'))
        query = {
            'service': "appveyor",
            'branch': os.getenv('APPVEYOR_REPO_BRANCH'),
            'build': os.getenv('APPVEYOR_JOB_ID'),
            'pr': os.getenv('APPVEYOR_PULL_REQUEST_NUMBER'),
            'job': '/'.join((os.getenv('APPVEYOR_ACCOUNT_NAME'),
                             os.getenv('APPVEYOR_PROJECT_SLUG'),
                             os.getenv('APPVEYOR_BUILD_VERSION'))),
            'tag': os.getenv('APPVEYOR_REPO_TAG_NAME'),
            'slug': os.getenv('APPVEYOR_REPO_NAME'),
            'commit': os.getenv('APPVEYOR_REPO_COMMIT'),
            'build_url': build_url,
        }
        root = os.getenv('APPVEYOR_BUILD_FOLDER')
    elif os.getenv('CI') == 'true' and os.getenv('CIRCLECI') == 'true':
        # https://circleci.com/docs/environment-variables
        query = {
            'service': 'circleci',
            'branch': os.getenv('CIRCLE_BRANCH'),
            'build': os.getenv('CIRCLE_BUILD_NUM'),
            'pr': os.getenv('CIRCLE_PR_NUMBER'),
            'job': os.getenv('CIRCLE_BUILD_NUM') + "."
            + os.getenv('CIRCLE_NODE_INDEX'),
            'tag': os.getenv('CIRCLE_TAG'),
            'slug': os.getenv('CIRCLE_PROJECT_USERNAME') + "/"
            + os.getenv('CIRCLE_PROJECT_REPONAME'),
            'commit': os.getenv('CIRCLE_SHA1'),
            'build_url': os.getenv('CIRCLE_BUILD_URL'),
        }
        # os.getcwdu() only exists on Python 2.
        if sys.version_info < (3, ):
            root = os.getcwdu()
        else:
            root = os.getcwd()
    else:
        root = package_root

    if not os.path.exists(os.path.join(root, '.git')):
        print('git repository not found, not submitting coverage data')
        return

    git_status = _git_command(['status', '--porcelain'], root)
    if git_status != '':
        print(
            'git repository has uncommitted changes, not submitting coverage data'
        )
        return

    slug = None
    token = None
    try:
        with open(os.path.join(root, 'codecov.json'), 'rb') as f:
            json_data = json.loads(f.read().decode('utf-8'))
            slug = json_data['slug']
            token = json_data['token']
    except (OSError, ValueError, UnicodeDecodeError, KeyError):
        print('error reading codecov.json')
        return

    branch = _git_command(['rev-parse', '--abbrev-ref', 'HEAD'], root)
    commit = _git_command(['rev-parse', '--verify', 'HEAD'], root)
    tag = _git_command(['name-rev', '--tags', '--name-only', commit], root)
    impl = _plat.python_implementation()
    major, minor = _plat.python_version_tuple()[0:2]
    build_name = '%s %s %s.%s' % (_platform_name(), impl, major, minor)

    # NOTE(review): this assignment *replaces* the CI-specific 'query'
    # dicts built above, leaving them dead stores -- confirm whether
    # query.update({...}) was intended here.
    query = {
        'branch': branch,
        'commit': commit,
        'slug': slug,
        'token': token,
        'build': build_name,
    }
    if tag != 'undefined':
        query['tag'] = tag

    # Assemble the codecov "v4" multi-section text payload.
    payload = 'PLATFORM=%s\n' % _platform_name()
    payload += 'PYTHON_VERSION=%s %s\n' % (_plat.python_version(),
                                           _plat.python_implementation())
    if 'oscrypto' in sys.modules:
        payload += 'OSCRYPTO_BACKEND=%s\n' % sys.modules['oscrypto'].backend()
    payload += '<<<<<< ENV\n'
    for path in _list_files(root):
        payload += path + '\n'
    payload += '<<<<<< network\n'
    payload += '# path=coverage.xml\n'
    with open(os.path.join(root, 'coverage.xml'), 'r', encoding='utf-8') as f:
        payload += f.read() + '\n'
    payload += '<<<<<< EOF\n'

    url = 'https://codecov.io/upload/v4'
    headers = {'Accept': 'text/plain'}
    # Drop empty/None values before building the query string.
    filtered_query = {}
    for key in query:
        value = query[key]
        if value == '' or value is None:
            continue
        filtered_query[key] = value

    print('Submitting coverage info to codecov.io')
    info = _do_request('POST', url, headers, query_params=filtered_query)

    # The v4 response body is "<result-url> <s3-upload-url>".
    encoding = info[1] or 'utf-8'
    text = info[2].decode(encoding).strip()
    parts = text.split()
    upload_url = parts[1]

    headers = {
        'Content-Type': 'text/plain',
        'x-amz-acl': 'public-read',
        'x-amz-storage-class': 'REDUCED_REDUNDANCY'
    }

    print('Uploading coverage data to codecov.io S3 bucket')
    _do_request('PUT', upload_url, headers, data=payload.encode('utf-8'))