def handle(self, **options): client = self.get_client(options) # First, see if we have an ssh public key public_key = self.get_public_key() if "Windows" in platform.platform(): self.win_setup_ssh() if not public_key: self.make_key() public_key = self.get_public_key() if not public_key: raise CommandError("Unknown error making SSH key.") if "Windows" in platform.platform(): self.win_write_keyinfo() response, content = client.post( '/account/sshkeys/', { "key": public_key, }, ) if response.status in (200, 302): if content and "form" in content and "errors" in content["form"]: print content['form']['errors']['key'][0] else: print "Added SSH key." else: raise CommandError("Unknown error, %s: %s" % (response.status, content))
def adapter(self): """ change byte into MB or KB into MB """ #according system type to decide the keyword if platform.platform().find("Ubuntu") != -1: keyword = "kB" if platform.platform().find("debian") != -1: keyword = "kB" elif platform.platform().find("centos") != -1: keyword = "kB" elif platform.platform().find("redhat") != -1: keyword = "byt" else: print "[ERROR]unknow platform" #change self data dictionary key with byte/kb into MB for i in xrange(len(self.type)): if self.type[i].find(keyword) != -1: temp_data = self.data.pop(self.type[i]) for j in xrange(len(temp_data)): if keyword == "kB": temp_data[j] = float("%0.2f" % (temp_data[j] / 1024)) if keyword == "byt": temp_data[j] = float("%0.2f" % (temp_data[j] / 1048576)) self.data.setdefault(self.type[i].replace(keyword, "") + "(MB)", temp_data) #change self type with byte/kb into MB for i in xrange(len(self.type)): if self.type[i].find(keyword) != -1: self.type[i] = self.type[i].replace(keyword, "") + "(MB)"
def test_macos(self):
    """platform.platform() on Darwin: check terse and full forms both when
    mac_ver() reports a macOS version and when it reports empty strings."""
    self.addCleanup(self.clear_caches)
    # Canned uname()/architecture() results for a Darwin 17.7.0 x86_64 host.
    uname = ('Darwin', 'hostname', '17.7.0',
             ('Darwin Kernel Version 17.7.0: '
              'Thu Jun 21 22:53:14 PDT 2018; '
              'root:xnu-4570.71.2~1/RELEASE_X86_64'),
             'x86_64', 'i386')
    arch = ('64bit', '')
    with mock.patch.object(platform, 'uname', return_value=uname), \
         mock.patch.object(platform, 'architecture', return_value=arch):
        for mac_ver, expected_terse, expected in [
            # darwin: mac_ver() returns empty strings
            (('', '', ''),
             'Darwin-17.7.0',
             'Darwin-17.7.0-x86_64-i386-64bit'),
            # macOS: mac_ver() returns macOS version
            (('10.13.6', ('', '', ''), 'x86_64'),
             'macOS-10.13.6',
             'macOS-10.13.6-x86_64-i386-64bit'),
        ]:
            with mock.patch.object(platform, 'mac_ver', return_value=mac_ver):
                # platform.platform() caches its result; reset between cases.
                self.clear_caches()
                self.assertEqual(platform.platform(terse=1), expected_terse)
                self.assertEqual(platform.platform(), expected)
def compile_moan():
    """Patch src/MoAn/Makefile to use a distro-appropriate gcc, then run make.

    Returns True when the build produced src/MoAn/moan; returns None when the
    Makefile is missing or the build did not produce the binary.
    """
    from re import compile
    cc_line = compile(r'^CC=gcc.*$')
    makefile = "src/MoAn/Makefile"
    if not os.path.exists(makefile):
        return
    # Read the whole Makefile, then rewrite it in place.
    with open(makefile) as src:
        lines = src.readlines()
    host = platform()
    with open(makefile, "w") as f:
        for line in lines:
            if cc_line.match(line):
                # Substitute a compiler known to work on this distribution.
                if host.find("fedora") != -1:
                    f.write("CC=gcc34\n")
                elif host.find("gentoo") != -1:
                    f.write("CC=gcc-4.1.2\n")
                # BUG FIX: the original tested `platform().find("debian")`
                # without `!= -1`; str.find returns -1 (truthy) on a miss,
                # so this branch fired on every platform.
                elif host.find("Ubuntu") != -1 or host.find("debian") != -1:
                    f.write("CC=gcc-4.1\n")
                else:
                    f.write(line)
            else:
                f.write(line)
    Popen(["make"], cwd="src/MoAn/", stdout=PIPE, stderr=PIPE).communicate()
    if os.path.exists("src/MoAn/moan"):
        return True
def print_platform_info():
    """Log a full dump of interpreter, platform, OS/hardware and executable
    architecture information at DEBUG level."""
    import platform
    logging.debug('*************** PLATFORM INFORMATION ************************')
    logging.debug('==Interpreter==')
    logging.debug('Version :' + platform.python_version())
    logging.debug('Version tuple:' + str(platform.python_version_tuple()))
    logging.debug('Compiler :' + platform.python_compiler())
    logging.debug('Build :' + str(platform.python_build()))
    logging.debug('==Platform==')
    logging.debug('Normal :' + platform.platform())
    logging.debug('Aliased:' + platform.platform(aliased=True))
    logging.debug('Terse :' + platform.platform(terse=True))
    logging.debug('==Operating System and Hardware Info==')
    logging.debug('uname:' + str(platform.uname()))
    logging.debug('system :' + platform.system())
    logging.debug('node :' + platform.node())
    logging.debug('release :' + platform.release())
    logging.debug('version :' + platform.version())
    logging.debug('machine :' + platform.machine())
    logging.debug('processor:' + platform.processor())
    logging.debug('==Executable Architecture==')
    logging.debug('interpreter:' + str(platform.architecture()))
    # architecture() of another binary may shell out to `file` where available.
    logging.debug('/bin/ls :' + str(platform.architecture('/bin/ls')))
    logging.debug('*******************************************************')
def get_system_type():
    """Print host platform identifiers (system, machine, macOS/Windows/Linux
    version tuples, and the full platform string) to stdout."""
    print(platform.system())
    print(platform.machine())
    print(platform.mac_ver())
    print(platform.win32_ver())
    # platform.linux_distribution() was deprecated in Python 3.5 and removed
    # in 3.8; emit its non-Linux default tuple where it no longer exists.
    if hasattr(platform, "linux_distribution"):
        print(platform.linux_distribution())
    else:
        print(('', '', ''))
    print(platform.platform())
def resolveOS (self):
    """Populate self.osType (coarse family) and self.osInfo (human-readable
    details) from sys.platform and the platform module."""
    if sys.platform == 'win32':
        self.osType = "Windows"
        p = platform.platform() + "-" + platform.architecture()[0]
        # Map Windows NT kernel versions (6.3/6.2/6.1/6.0/5.x) to marketing names.
        if fnmatch.fnmatch(p, "Windows-*-6.3*-64*"):
            w = "8.1"
        elif fnmatch.fnmatch(p, "Windows-*-6.2*-64*"):
            w = "8"
        elif fnmatch.fnmatch(p, "Windows-*-6.1*-64*"):
            w = "7"
        elif fnmatch.fnmatch(p, "Windows-*-6.1*-32*"):
            w = "7"
        elif fnmatch.fnmatch(p, "Windows-*-6.0*-64*"):
            w = "Vista"
        elif fnmatch.fnmatch(p, "Windows-*-5.*"):
            w = "XP"
        else:
            # Unrecognized kernel: fall back to the oldest label.
            w = "XP"
        self.osInfo = "Windows, " + w + ", " + p
    elif sys.platform == 'darwin':
        self.osType = "MacOS"
        self.osInfo = "MacOSX, " + \
            platform.mac_ver()[0] + ", " + \
            platform.platform()
    else:
        self.osType = "Linux"
        # NOTE(review): platform.linux_distribution() was removed in
        # Python 3.8 — this branch raises AttributeError there; confirm the
        # supported interpreter version.
        d = platform.linux_distribution()
        self.osInfo = d[0] + ", " + d[1] + ", " + platform.platform()
def print_python_env():
    """Print interpreter, platform, hardware and network identification
    details to stdout, grouped into labelled sections."""
    rule = '-------------------------------------------------------------'

    def emit_section(title, entries):
        # print() inserts the separating space between label and value.
        print(title)
        for label, value in entries:
            print(label, value)
        print()

    print(rule)
    emit_section('Interpreter', [
        ('platform.python_version: ', platform.python_version()),
        ('platform.python_compiler: ', platform.python_compiler()),
        ('platform.python_build: ', platform.python_build()),
    ])
    emit_section('Platform', [
        ('platform.platform(Normal): ', platform.platform()),
        ('platform.platform(Aliased): ', platform.platform(aliased=True)),
        ('platform.platform(Terse): ', platform.platform(terse=True)),
    ])
    emit_section('Operating System and Hardware Info', [
        ('platform.uname: ', platform.uname()),
        ('platform.system: ', platform.system()),
        ('platform.node: ', platform.node()),
        ('platform.release: ', platform.release()),
        ('platform.version: ', platform.version()),
        ('platform.machine: ', platform.machine()),
        ('platform.processor: ', platform.processor()),
    ])
    emit_section('Executable Architecture', [
        ('platform.architecture: ', platform.architecture()),
    ])
    print('Network')
    print('socket.gethostname: ', socket.gethostname())
    print('socket.gethostbyname ', socket.gethostbyname(socket.gethostname()))
    print(rule)
def copy_to_clipboard(text):
    """Copy *text* to the system clipboard via the platform-specific helper.

    Dispatches on the host OS (Linux, Windows, macOS in that order);
    unrecognized platforms are silently ignored.
    See http://stackoverflow.com/a/10091465 for background.
    """
    host = platform.platform()
    if "Linux" in host:
        copy_to_clipboard_on_linux(text=text)
    elif "Windows" in host:
        copy_to_clipboard_on_windows(text=text)
    elif "darwin" in sys.platform:
        copy_to_clipboard_on_mac(text=text)
def start(self):
    """Begin monitoring the working copy for local changes.

    Uses inotifywait on Linux and the Win32 directory-change API on
    Windows; any other platform (or use_polling=True) falls back to polling.
    """
    assert not self.running
    self.exiting = False
    self.flag = True  # assume changes initially
    self.event.set()
    if not self.use_polling:
        if platform.platform().startswith('Linux'):
            print(self.options.msg_prefix() + 'monitoring local changes in %s' % self.options.working_copy_root)
            # Stream file events from inotifywait; one path per line on stdout.
            self.proc = subprocess.Popen(['inotifywait', '--monitor', '--recursive', '--quiet',
                                          '-e', 'attrib', '-e', 'close_write', '-e', 'move',
                                          '-e', 'create', '-e', 'delete',
                                          '--format', '%w%f',
                                          self.options.working_copy_root],
                                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
            self.proc.stdin.close()
            self.thread = threading.Thread(target=self._inotifywait_handler, args=())
            self.thread.start()
        elif platform.platform().startswith('Windows'):
            print(self.options.msg_prefix() + 'monitoring local changes in %s' % self.options.working_copy_root)
            # FILE_LIST_DIRECTORY = 1 (for the second argument)
            self.handle = win32file.CreateFile(self.options.working_copy_root, 1,
                                               win32con.FILE_SHARE_READ |
                                               win32con.FILE_SHARE_WRITE |
                                               win32con.FILE_SHARE_DELETE,
                                               None, win32con.OPEN_EXISTING,
                                               win32con.FILE_FLAG_BACKUP_SEMANTICS, None)
            self.thread = threading.Thread(target=self._read_directory_changes_handler, args=())
            self.thread.start()
        else:
            # TODO: support Kevent for BSD
            self.use_polling = True
    if self.use_polling:
        print(self.options.msg_prefix() + \
              'monitoring local changes in %s (polling)' % self.options.working_copy_root)
    self.running = True
def version_table():
    """Returns a summarized version table of all software compiled in, with
    their respective versions."""
    space = ' '
    packsize = 20   # column width for the package name
    descsize = 55   # column width for the version description
    # Collect version info from every bob subpackage that exposes one.
    version_dict = {}
    version_dict.update(bob.core.version)
    version_dict.update(bob.io.version)
    version_dict.update(bob.sp.version)
    version_dict.update(bob.ip.version)
    if hasattr(bob.machine, 'version'):
        version_dict.update(bob.machine.version)
    if bob.has_daq and hasattr(bob.daq, 'version'):
        version_dict.update(bob.daq.version)
    if bob.has_visioner and hasattr(bob.visioner, 'version'):
        version_dict.update(bob.visioner.version)
    build = pkg_resources.require('bob')[0]
    bob_version = "'%s' (%s)" % (build.version, platform.platform())
    print 75*'='
    print (" bob %s" % bob_version).center(75)
    print 75*'='
    print ""
    # NOTE(review): `distribution` duplicates `build` and is never used.
    distribution = pkg_resources.require('bob')[0]
    print "Python Egg Properties"
    print "---------------------\n"
    print " * Version : '%s'" % build.version
    print " * System : '%s'" % platform.system()
    print " * Platform : '%s'" % platform.platform()
    print " * Python Version : '%s'" % platform.python_version()
    print " * Egg Dependencies: "
    for egg in my_eggs():
        print " - %s, version '%s'" % (egg.key, egg.version)
    print ""
    print "Compiled-in Dependencies"
    print "------------------------\n"
    sep = space + packsize*'=' + space + descsize*'='
    fmt = 2*space + ('%%%ds' % packsize) + space + ('%%%ds' % descsize)
    print sep
    print fmt % ('Package'.ljust(packsize), 'Version'.ljust(descsize))
    print sep
    for k in sorted(version_dict.keys()):
        v = version_dict[k]
        # Pretty-print known packages whose version info is structured.
        # NOTE(review): 'numpy' is a plain `if` while the rest form an
        # elif chain — 'numpy' therefore also falls through the chain below;
        # confirm this is intended.
        if k.lower() == 'numpy':
            v = '%s (%s)' % (numpy.version.version, v)
        if k.lower() == 'compiler':
            v = '-'.join(v)
        elif k.lower() == 'ffmpeg':
            if v.has_key('ffmpeg'):
                v = v['ffmpeg']
            else:
                v = ';'.join(['%s-%s' % (x, v[x]) for x in v.keys()])
        elif k.lower() == 'qt4':
            v = '%s (from %s)' % v
        elif k.lower() == 'fftw':
            v = '%s (%s)' % v[:2]
        print fmt % (k.ljust(packsize), v.ljust(descsize))
    print sep
def TestPlatForm():
    """Print OS architecture, full platform string, system name and the
    running Python version, one per line."""
    probes = (
        platform.architecture,
        platform.platform,
        platform.system,
        platform.python_version,
    )
    print("--------------Operation System------------")
    for probe in probes:
        print(probe())
def platform_path():
    """Return the bundled gpg binary path (relative to parley_dir) for the
    host OS, or None when the platform is unrecognized."""
    host = platform.platform()
    binaries = (
        ('Darwin', 'osx/bin/gpg'),
        ('Windows', 'win32\\gpg.exe'),
        ('Linux', 'linux/bin/gpg'),
    )
    for marker, gpg_path in binaries:
        if marker in host:
            return gpg_path
def __init__(self, name, switch_labels): ''' Creates a new QuadStick object. ''' # Set constants self.BAND = 0.2 # Must be these close to neutral for hold / autopilot # Init pygame pygame.init() pygame.display.init() self.screen = pygame.display.set_mode((500,280), pygame.locals.RESIZABLE) self.font = pygame.font.SysFont('Courier', 20) pygame.display.set_caption('QuadStick: ' + name) # Supports keyboard polling self.keys = [] self.name = name self.platform = platform()[0:platform().find('-')] self.row_height = 30 self.paused = False # pygame.joystick.init() # self.joystick = pygame.joystick.Joystick(0) # self.joystick.init() # self.joystick.get_axis(0) self.ready = False self.switch_labels = switch_labels
def init_log(self, phone_number):
    """Create today's log directory (and the image directory) under the
    configured log root, then build and return a logger for *phone_number*."""
    # Read the log-path configuration from db.conf next to this script.
    cur_script_dir = os.path.split(os.path.realpath(__file__))[0]
    cfg_path = os.path.join(cur_script_dir, "db.conf")
    cfg_reder = ConfigParser.ConfigParser()
    cfg_reder.readfp(codecs.open(cfg_path, "r", "utf_8"))
    today = datetime.date.today().strftime('%Y%m%d')
    self._SECNAME = "LOGPATH"
    # Pick the per-OS log-dir option from the [LOGPATH] section.
    if platform.platform().find("windows") != -1 or platform.platform().find("Windows") != -1:
        self._OPTNAME = "WINDOWS_LOGDIR"
    else:
        self._OPTNAME = "LINUX_LOGDIR"
    self._LOGROOT = cfg_reder.get(self._SECNAME, self._OPTNAME)
    # Build the path of today's log directory, creating it if missing.
    log_path = os.path.join(self._LOGROOT, today)
    if not os.path.isdir(log_path):
        os.makedirs(log_path)
    self.logger = comm_log(phone_number, logpath=log_path)
    self.imgroot = os.path.join(self._LOGROOT, 'img')
    # Create the image directory if it does not exist yet.
    if not os.path.isdir(self.imgroot):
        os.makedirs(self.imgroot)
    return self.logger
def install_path():
    """Return the OS-specific installer script path (relative to parley_dir).

    Windows has no installer, so it yields None — as do unrecognized
    platforms.
    """
    host = platform.platform()
    if 'Darwin' in host:
        return './osx-install.sh'
    if 'Windows' in host:
        return None
    if 'Linux' in host:
        return './linux-install.sh'
def get_platform():
    """Classify the host OS as 'osx', 'win32' or 'linux' (None if unknown)."""
    host = platform.platform()
    tags = (('Darwin', 'osx'), ('Windows', 'win32'), ('Linux', 'linux'))
    for marker, tag in tags:
        if marker in host:
            return tag
def ndk_bin_path(ndk): ''' Return the prebuilt bin path for the host OS. If Python executable is the NDK-prebuilt one (it should be) then use the location of the executable as the first guess. We take the grand-parent foldername and then ensure that it starts with one of 'linux', 'darwin' or 'windows'. If this is not the case, then we're using some other Python and fall-back to using platform.platform() and sys.maxsize. ''' try: ndk_host = os.path.basename( os.path.dirname( os.path.dirname(sys.executable))) except: ndk_host = '' # NDK-prebuilt Python? if (not ndk_host.startswith('linux') and not ndk_host.startswith('darwin') and not ndk_host.startswith('windows')): is64bit = True if sys.maxsize > 2**32 else False if platform.platform().startswith('Linux'): ndk_host = 'linux%s' % ('-x86_64' if is64bit else '-x86') elif platform.platform().startswith('Darwin'): ndk_host = 'darwin%s' % ('-x86_64' if is64bit else '-x86') elif platform.platform().startswith('Windows'): ndk_host = 'windows%s' % ('-x86_64' if is64bit else '') else: ndk_host = 'UNKNOWN' return ndk+os.sep+'prebuilt'+os.sep+ndk_host+os.sep+'bin'
def create_get_response(rfc_num, rfc_title):
    """Build the P2P-CI/1.0 GET response for an RFC file and return it as a
    list of message strings (200 OK with headers when the file exists,
    otherwise 404 Not Found)."""
    current_path = os.getcwd()
    message_list = []
    # RFC files live under ./RFC/<num>_<title>.txt
    filename = current_path + "/RFC/" + str(rfc_num) + "_" + str(rfc_title) + ".txt"
    current_time = time.strftime("%a, %d %b %Y %X %Z", time.localtime())
    if os.path.isfile(filename):
        content_length = os.path.getsize(filename)
        message = "P2P-CI/1.0 200 OK\n"\
                  "Date: " + str(current_time) + "\n"\
                  "OS: " + str(platform.platform()) + "\n"\
                  "Last-Modified: " + str(time.ctime(os.path.getmtime(filename))) + "\n"\
                  "Content-Length:" + str(content_length) + "\n"\
                  "Content-Type: text/text \n"
    else:
        message = "P2P-CI/1.0 404 Not Found\n"\
                  "Date: " + str(current_time) + "\n"\
                  "OS: " + str(platform.platform()) + "\n"
    print "GET Response Message sent by Upload Peer"
    print message
    message_list.append(message)
    return message_list
    # NOTE(review): everything below is unreachable — dead code after the
    # unconditional return above.  It looks like the file-payload path that
    # was meant to run only for the 200 case; confirm intent before removing.
    print "GET Response Message sent by Upload Peer"
    data_stream = open(filename, 'r')
    data = data_stream.read()
    print message + str(data)
    message_list.append(message)
    message_list.append(data)
    return message_list
def set_locations(self, datadir):
    """Build the ordered list of external-tools script directories and load
    the tool tree.

    self.locations[0] is the user-writable directory where custom scripts
    are saved; XDG data dirs and *datadir* follow it.  When the user
    directory is created for the first time, legacy XML-store tools are
    imported into it.
    """
    self.locations = []
    # BUG FIX: the original compared platform.platform() == "Windows", but
    # platform.platform() returns strings like "Windows-10-10.0.19041-SP0",
    # never the bare "Windows" — so the Windows branches were dead.
    # platform.system() returns exactly "Windows".
    if platform.system() != "Windows":
        for d in self.get_xdg_data_dirs():
            self.locations.append(os.path.join(d, "pluma", "plugins", "externaltools", "tools"))
    self.locations.append(datadir)
    # self.locations[0] is where we save the custom scripts
    if platform.system() == "Windows":
        toolsdir = os.path.expanduser("~/pluma/tools")
    else:
        userdir = os.getenv("MATE22_USER_DIR")
        if userdir:
            toolsdir = os.path.join(userdir, "pluma/tools")
        else:
            toolsdir = os.path.expanduser("~/.config/pluma/tools")
    self.locations.insert(0, toolsdir)
    if not os.path.isdir(self.locations[0]):
        # First run: create the user dir and migrate the old XML tool store.
        os.makedirs(self.locations[0])
        self.tree = ToolDirectory(self, "")
        self.import_old_xml_store()
    else:
        self.tree = ToolDirectory(self, "")
def print_platform_info():
    """Log interpreter, platform, OS/hardware and executable-architecture
    details at DEBUG level, grouped under section headers."""
    import platform
    sections = (
        ("==Interpreter==", (
            ("Version :", platform.python_version()),
            ("Version tuple:", str(platform.python_version_tuple())),
            ("Compiler :", platform.python_compiler()),
            ("Build :", str(platform.python_build())),
        )),
        ("==Platform==", (
            ("Normal :", platform.platform()),
            ("Aliased:", platform.platform(aliased=True)),
            ("Terse :", platform.platform(terse=True)),
        )),
        ("==Operating System and Hardware Info==", (
            ("uname:", str(platform.uname())),
            ("system :", platform.system()),
            ("node :", platform.node()),
            ("release :", platform.release()),
            ("version :", platform.version()),
            ("machine :", platform.machine()),
            ("processor:", platform.processor()),
        )),
        ("==Executable Architecture==", (
            ("interpreter:", str(platform.architecture())),
            ("/bin/ls :", str(platform.architecture("/bin/ls"))),
        )),
    )
    logging.debug("*************** PLATFORM INFORMATION ************************")
    for header, entries in sections:
        logging.debug(header)
        for label, value in entries:
            logging.debug(label + value)
    logging.debug("*******************************************************")
def setup():
    """Initialize the module-level singletons: the image queue, the serial
    link (port chosen per OS), the UDP input socket and the demo
    transmitter thread."""
    global newImageQueue
    global udp
    global demoTransmitter
    global serialPort
    newImageQueue = Queue(2)
    print(platform.platform())
    # NOTE(review): this local `os` shadows the `os` module inside setup().
    os = platform.platform()
    if ('Linux' in os):
        serialPort = '/dev/ttyACM0'
    elif ('Windows' in os):
        serialPort = 'COM6'
    else:
        # should not get here
        serialPort = 'dummy'
    # testing
    #serialPort = 'dummy'
    serialConfigure(serialPort)
    # configure input
    udp = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)  # UDP
    udp.bind((UDP_IP, UDP_PORT))
    # create window
    #size(ledCnt, stripCnt)  # create the window
    demoTransmitter = DemoTransmitter(ledCnt, stripCnt, newImageQueue)
    demoTransmitter.start()
def check_user_rights():
    """On a non-Windows POSIX host, drop root privileges to the login
    user's uid/gid.  No-op on Windows, when not running as root, or when
    the login user is root itself."""
    try:
        from platform import platform
    except ImportError:
        quit('Cannot load platform module')
    else:
        # BUG FIX: the original guard read
        #   `if not platform() and platform() != '' and ...`
        # which is unsatisfiable (`not platform()` needs an empty string,
        # `platform() != ''` a non-empty one), so the whole body was dead.
        if platform() and platform()[:7].lower() != 'windows':
            try:
                from os import geteuid, getlogin, setgid, setuid
            except ImportError:
                quit('Cannot load geteuit, getlogin, setgid or setuid, quitting!')
            else:
                # Only act when we are actually root on a POSIX system.
                if name == 'posix' and not geteuid():
                    login = getlogin()
                    if login != 'root':
                        print("Bringing down user rights")
                        try:
                            import pwd
                        except ImportError as e:
                            exit("Failed loading the pwd module!")
                        else:
                            # Drop gid first — setuid would forfeit the right
                            # to change the group afterwards.
                            user = pwd.getpwnam(login)
                            setgid(user.pw_gid)
                            setuid(user.pw_uid)
def retrieve_images(script, timeout):
    """Downloading all images for the script

    Fetches each scene's media_url into conf.IMAGE_PATH (curl on macOS,
    wget on Linux), waiting up to *timeout* seconds per download, and
    prints a warning when any download failed, timed out or the platform
    is unsupported.
    """
    success = []
    for scene in script:
        img_url = scene['media_url']
        img_filename = img_url.split('/')[-1]
        # Image should be downloaded only once.
        if path.exists(conf.IMAGE_PATH + img_filename):
            continue
        # Getting images
        if 'Darwin' in platform():  # Mac OS X
            p = subprocess.Popen(['curl', '-s', '-o', conf.IMAGE_PATH + img_filename,
                                  '--create-dirs', img_url])
        elif 'Linux' in platform():  # Linux
            p = subprocess.Popen(['wget', '-q', '-nH', '-P', conf.IMAGE_PATH, img_url])
        else:
            # BUG FIX: `p` was unbound on unsupported platforms, raising
            # NameError below; record the failure and move on instead.
            success.append(False)
            continue
        # Poll for completion in 0.1 s steps until the timeout elapses.
        tmout = float(timeout)
        while p.poll() != 0 and tmout > 0:
            time.sleep(0.1)
            tmout -= 0.1
        # BUG FIX: the original tested `tmout == 0`, but repeated float
        # subtraction almost never lands exactly on 0.0, so timeouts were
        # silently reported as successes.
        success.append(tmout > 0)
    # Check if everything went well, and report only in case of problems
    if not all(success):
        print("WARNING! There were problems downloading images. "
              "Check your internet connection and try again")
def get_platform():
    """Return a short OS tag for the host: "osx", "win32" or "linux"
    (None when the platform is not recognized)."""
    host = platform.platform()
    if "Darwin" in host:
        return "osx"
    if "Windows" in host:
        return "win32"
    if "Linux" in host:
        return "linux"
    return None
def __init__(self):
    """Set up the two proxy sockets (external/peer and internal/VPN) and
    wire them together; which side listens vs. connects depends on the
    module-level SERVER flag."""
    print("Starting Proxy")
    self.ext_sock = socket.socket()
    self.ext_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # SO_REUSEPORT is unavailable on Windows.
    if("Windows" not in platform.platform()):
        self.ext_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
    self.ext_sock.bind(EXT)
    self.int_sock = socket.socket()
    self.int_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    if("Windows" not in platform.platform()):
        self.int_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
    self.int_sock.bind(INT)
    if(not SERVER):
        # Client role: accept the local VPN, then connect out to the peer.
        self.int_sock.listen(5)
        connint, addr = self.int_sock.accept()
        localvpnport = addr[1]
        print("Verbindung zu VPN hergestellt")
        self.ext_sock.connect(PEER)
        print("Verbindung zu PEER hergestellt")
        proxy_extern = Proxy(connint.dup(), self.ext_sock.dup())
    else:
        # Server role: connect to the local VPN, then accept the peer.
        self.int_sock.connect(LOCALVPN)
        print("Verbindung zu VPN hergestellt")
        self.ext_sock.listen(5)
        connext, addr = self.ext_sock.accept()
        print("Verbindung zu PEER hergestellt")
        proxy_intern = Proxy(self.int_sock.dup(), connext.dup())
    tintern = threading.Thread(target=Proxy.intern)
    textern = threading.Thread(target=Proxy.extern)
    # NOTE(review): Thread.start() returns None (dataint/dataext are always
    # None) and starting the same Thread twice raises RuntimeError — the
    # second loop iteration will fail.  Confirm intended behaviour.
    while True:
        print("vor weiter")
        dataint = tintern.start()
        print("WEITER")
        dataext = textern.start()
def load_cifar10(self, directory):
    """Load the CIFAR-10 pickle batches from *directory* into
    self.train_images/self.train_labels and self.test_images/self.test_labels.

    Images become (32, 32, 3) float arrays; labels become int arrays.
    """
    # Load the five training batches.
    images, labels = [], []
    for filename in ['%s/data_batch_%d' % (directory, j) for j in range(1, 6)]:
        with open(filename, 'rb') as fo:
            # CIFAR batches were pickled under Python 2; the bytes-encoding
            # argument is needed on some setups.  NOTE(review): on a platform
            # that is neither Windows nor Linux, `cifar10` is left unbound
            # and the code below raises NameError.
            if 'Windows' in platform.platform():
                cifar10 = pickle.load(fo, encoding='bytes')
            elif 'Linux' in platform.platform():
                cifar10 = pickle.load(fo)
            for i in range(len(cifar10[b"labels"])):
                # Stored as (channel, row, col); transpose to (row, col, channel).
                image = numpy.reshape(cifar10[b"data"][i], (3, 32, 32))
                image = numpy.transpose(image, (1, 2, 0))
                image = image.astype(float)
                images.append(image)
            labels += cifar10[b"labels"]
    images = numpy.array(images, dtype='float')
    labels = numpy.array(labels, dtype='int')
    self.train_images, self.train_labels = images, labels
    # Load the test batch.
    images, labels = [], []
    for filename in ['%s/test_batch' % (directory)]:
        with open(filename, 'rb') as fo:
            if 'Windows' in platform.platform():
                cifar10 = pickle.load(fo, encoding='bytes')
            elif 'Linux' in platform.platform():
                cifar10 = pickle.load(fo)
            for i in range(len(cifar10[b"labels"])):
                image = numpy.reshape(cifar10[b"data"][i], (3, 32, 32))
                image = numpy.transpose(image, (1, 2, 0))
                image = image.astype(float)
                images.append(image)
            labels += cifar10[b"labels"]
    images = numpy.array(images, dtype='float')
    labels = numpy.array(labels, dtype='int')
    self.test_images, self.test_labels = images, labels
def device_info(self):
    '''
    Device Info is sent only when the device is getting registered to the
    Device Manager. This method should be used to send the static
    information about the device. Device Manager can decide to invoke this
    method from server side if needed
    '''
    # Interpreter identification.
    python_info = {
        "version": platform.python_version(),
        "version_tuple": platform.python_version_tuple(),
        "compiler": platform.python_compiler(),
        "build": platform.python_build(),
    }
    # Platform string in its three standard renderings.
    platform_info = {
        "normal": platform.platform(),
        "alias": platform.platform(aliased=True),
        "terse": platform.platform(terse=True),
    }
    # Host hardware / kernel identification.
    hardware = {
        "system": platform.system(),
        "node": platform.node(),
        "release": platform.release(),
        "version": platform.version(),
        "machine": platform.machine(),
        "processor": platform.processor(),
    }
    return {
        "python_info": python_info,
        "platform": platform_info,
        "os": {"name": platform.uname()},
        "hardware": hardware,
    }
def set_locations(self, datadir):
    """Build the ordered list of external-tools script directories and load
    the tool tree.

    self.locations[0] is the user-writable directory where custom scripts
    are saved; XDG data dirs and *datadir* follow it.  When the user
    directory is created for the first time, legacy XML-store tools are
    imported into it.
    """
    self.locations = []
    # BUG FIX: the original compared platform.platform() == 'Windows', but
    # platform.platform() returns strings like 'Windows-10-10.0.19041-SP0',
    # never the bare 'Windows' — so the Windows branches were dead.
    # platform.system() returns exactly 'Windows'.
    if platform.system() != 'Windows':
        for d in self.get_xdg_data_dirs():
            self.locations.append(os.path.join(d, 'gedit-2', 'plugins', 'externaltools', 'tools'))
    self.locations.append(datadir)
    # self.locations[0] is where we save the custom scripts
    if platform.system() == 'Windows':
        toolsdir = os.path.expanduser('~/gedit/tools')
    else:
        userdir = os.getenv('GNOME22_USER_DIR')
        if userdir:
            toolsdir = os.path.join(userdir, 'gedit/tools')
        else:
            toolsdir = os.path.expanduser('~/.gnome2/gedit/tools')
    self.locations.insert(0, toolsdir)
    if not os.path.isdir(self.locations[0]):
        # First run: create the user dir and migrate the old XML tool store.
        os.makedirs(self.locations[0])
        self.tree = ToolDirectory(self, '')
        self.import_old_xml_store()
    else:
        self.tree = ToolDirectory(self, '')
def os_type():
    """Print the host platform string, or "unknown" when the platform
    module cannot be imported."""
    try:
        from platform import machine, platform
    # BUG FIX: was a bare `except:` — only an import failure is expected here.
    except ImportError:
        print("unknown")
        return
    if platform():
        print(platform())
# Merge any extra settings modules into the global `settings` object
# (currently none are configured).
list_of_extra_settings = []
for extra_settings in list_of_extra_settings:
    for attribute in dir(extra_settings):
        if '__' not in attribute:
            setattr(settings, attribute, getattr(extra_settings, attribute))

##################################################################################################
# Defaults (likely overwritten by custom setting)
##################################################################################################
settings.experiments = {'LB': 'LiteBRID', 'SA': 'Simons Array', 'SO': 'Simons Observatory'}
settings.loaded_experiment = 'SA'
# Per-OS defaults: platform tag and the current user name derived from the
# home directory path separator.
if 'Linux' in platform.platform():
    settings.platform = 'Linux'
    settings.load_still_image_as_camera = True
    settings.user = os.path.expanduser('~').split('/')[-1]
elif 'Windows' in platform.platform():
    settings.platform = 'Windows'
    settings.user = os.path.expanduser('~').split('\\')[-1]

# Query Stuff
today = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d')
settings.today = datetime.datetime.now()

# Layout Stuff
settings.title = 'BoloCalc Gui'
settings.small_font = QtGui.QFont("Times", 8)
settings.med_font = QtGui.QFont("Times", 11)
import simplejson as json except ImportError: import json try: import wmi except ImportError: pass try: os.system('diskperf -y') except: pass HostName = platform.node() Platform = platform.platform() HostId = hashlib.md5(str(uuid.getnode())).hexdigest() CPU = psutil.cpu_count() RAM = psutil.virtual_memory().total BASE_NAME = os.path.basename(os.path.abspath('.')).split('.')[0] LOG = logging.getLogger(BASE_NAME) realtime_io = { "disk_io": (None, None), "network_io": (None, None), } class Config(SafeConfigParser): defaults = { 'data_url': '192.168.1.214:3344',
# -*- coding: utf-8 -*- "木偶测试文件,请直接在命令行执行 python puppet\test.py" if __name__ == '__main__': import platform import time import puppet print('\n{}\nPython Version: {}'.format(platform.platform(), platform.python_version())) print('默认使用百度云OCR进行验证码识别') print("\n注意!必须将client_path的值修改为你自己的交易客户端路径!\n") bdy = { 'appId': '', 'apiKey': '', 'secretKey': '' } # 百度云 OCR https://cloud.baidu.com/product/ocr accinfos = { 'account_no': '198800', 'password': '******', 'comm_pwd': True, # 模拟交易端必须为True 'client_path': r'你的交易客户端目录\xiadan.exe' } # 自动登录交易客户端 # acc = puppet.login(accinfos) # acc = puppet.Account(accinfos)
import os
import platform
import logging

# Pick a per-user log file location: %HOMEDRIVE%%HOMEPATH% on Windows,
# $HOME elsewhere.
if platform.platform().startswith('Windows'):
    logging_file = os.path.join(os.getenv('HOMEDRIVE'),
                                os.getenv('HOMEPATH'),
                                'test.log')
else:
    logging_file = os.path.join(os.getenv('HOME'), 'test.log')

print('Logging to', logging_file)

# Route all DEBUG-and-above records to the file, truncating it each run.
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s : %(levelname)s : %(message)s',
    filename=logging_file,
    filemode='w',
)

logging.debug("Start of the program")
logging.info("Doing something")
logging.warning("Dying now")
# link to the create a new issue page ISSUES_LINK = "https://github.com/Ekultek/WhatWaf/issues/new" # regex to detect the URL protocol (http or https) PROTOCOL_DETECTION = re.compile("http(s)?") # fingerpritn path for unknown firewalls UNKNOWN_PROTECTION_FINGERPRINT_PATH = "{}/.whatwaf".format( os.path.expanduser("~")) # default user-agent DEFAULT_USER_AGENT = "whatwaf/{} (Language={}; Platform={})".format( VERSION, sys.version.split(" ")[0], platform.platform().split("-")[0]) # payloads for detecting the WAF, at least one of # these payloads should trigger the WAF and provide # us with the information we need to identify what # the WAF is, along with the information we will need # to identify what tampering method we should use WAF_REQUEST_DETECTION_PAYLOADS = ( "<frameset><frame src=\"javascript:alert('XSS');\"></frameset>", " AND 1=1 ORDERBY(1,2,3,4,5) --;", '><script>alert("testing");</script>', (" AND 1=1 UNION ALL SELECT 1,NULL,'<script>alert(\"XSS\")</script>'," "table_name FROM information_schema.tables WHERE 2>1--/**/; EXEC " "xp_cmdshell('cat ../../../etc/passwd')#"), '<img src="javascript:alert(\'XSS\');">', "'))) AND 1=1,SELECT * FROM information_schema.tables ((('", "' )) AND 1=1 (( ' -- rgzd",
model_file=None, batch_size=batch_size) dlc.joint_train(train_data_src1, valid_data_src1, train_data_src2, valid_data_src2, train_data, valid_data, test_data, n_epochs=n_epochs, lr=lr) str_today = datetime.date.today().strftime('%y-%m-%d') dm = 'semeval15' # dm = 'semeval14' dataset_name = 'restaurant' # dataset_name = 'laptops' hidden_size_lstm = 100 n_epochs = 400 os_env = 'Windows' if platform().startswith('Windows') else 'Linux' if dataset_name == 'laptops': # word_vecs_file = config.SE14_LAPTOP_GLOVE_WORD_VEC_FILE # word_vecs_file = config.SE14_LAPTOP_AMAZON_WORD_VEC_FILE pre_tok_texts_file = config.AMAZON_TOK_TEXTS_FILE pre_aspect_terms_file = config.AMAZON_RM_TERMS_FILE pre_opinion_terms_file = config.AMAZON_TERMS_TRUE4_FILE wv_dim = '100' if os_env == 'Windows': word_vecs_file = 'd:/data/aspect/semeval14/model-data/amazon-wv-nr-{}-sg-n10-w8-i30.pkl'.format(wv_dim) else: word_vecs_file = '/home/hldai/data/aspect/semeval14/model-data/amazon-wv-nr-{}-sg-n10-w8-i30.pkl'.format(wv_dim) train_valid_split_file = config.SE14_LAPTOP_TRAIN_VALID_SPLIT_FILE
#!/usr/bin/env python # encoding: utf-8 # # Copyright (c) 2010 Doug Hellmann. All rights reserved. # """Example usercustomize.py """ #end_pymotw_header print 'Loading usercustomize.py' import site import platform import os import sys path = os.path.expanduser(os.path.join('~', 'python', sys.version[:3], platform.platform())) print 'Adding new path', path site.addsitedir(path)
def os_Data():
    """Return a dict describing the host OS.

    Keys:
        osName:    the full platform identification string.
        osVersion: the distribution triple joined with '-' (empty on
                   non-Linux hosts, and on Python >= 3.8 where
                   platform.dist() no longer exists).
    """
    data = {'osName': platform.platform()}
    # platform.dist() was deprecated in Python 3.5 and removed in 3.8;
    # keep the old behaviour where it exists and degrade gracefully after.
    if hasattr(platform, 'dist'):
        data['osVersion'] = "-".join(platform.dist())
    else:
        data['osVersion'] = ""
    return data
os.makedirs(subj_path) # copy the recipe file directly to the base path shutil.copy2(user_settings['recipe'], subj_path) # now make a json copy with open(user_settings['recipe'], 'r') as f: dct = toml.load(f) strip_name = os.path.splitext(os.path.basename(user_settings['recipe']))[0] json_name = os.path.join(subj_path, strip_name) with open(json_name + '.json', 'w+') as f: json.dump(dct, f, indent=2) # write the recipe-level settings file dct_md5 = md5(json.dumps(dct).encode('utf-8')).hexdigest() recipe_level_sets = { 'recipe_md5': dct_md5, 'os': platform.platform(), 'py_version': platform.python_version(), 'rush_allowed': can_rush, 'fps': int(1/win.frame_period), 'gpu': win.context.info['GL_RENDERER'], 'device': device_type, 'language': user_settings['spanish'], 'drop_version': __version__ } set_name = os.path.join(subj_path, 'settings.json') with open(set_name, 'w+') as f: json.dump(recipe_level_sets, f, indent=2) # make BlockHandlers for every block specified default_recipes = os.path.join(user_settings['recipe_dir'], 'defaults/') # resolved is the proto-block-handler, resolved_dict is the plain list of
def _load_codeshare_script(self, uri):
    """Fetch a project's script from codeshare.frida.re and return its
    source, but only after the user confirms the project's SHA-256
    fingerprint (confirmed fingerprints are cached in the trust store, so
    each project version is confirmed only once).  Returns None on fetch
    failure or when the user declines."""
    trust_store = self._get_or_create_truststore()
    project_url = "https://codeshare.frida.re/api/project/{}/".format(uri)
    response_json = None
    try:
        request = build_opener()
        request.addheaders = [('User-Agent', 'Frida v{} | {}'.format(frida.__version__, platform.platform()))]
        response = request.open(project_url)
        response_content = response.read().decode('utf-8')
        response_json = json.loads(response_content)
    except Exception as e:
        self._print("Got an unhandled exception while trying to retrieve {} - {}".format(uri, e))
        return None
    trusted_signature = trust_store.get(uri, "")
    # A project is trusted when the hash of its current source matches the
    # fingerprint recorded at the last confirmation.
    fingerprint = hashlib.sha256(response_json['source'].encode('utf-8')).hexdigest()
    if fingerprint == trusted_signature:
        return response_json['source']
    # NOTE(review): the prompt's line layout was reconstructed from
    # collapsed source — confirm against upstream frida-tools.
    self._print("""Hello! This is the first time you're running this particular snippet, or the snippet's source code has changed.

Project Name: {project_name}
Author: {author}
Slug: {slug}
Fingerprint: {fingerprint}
URL: {url}
""".format(
        project_name=response_json['project_name'],
        author="@" + uri.split('/')[0],
        slug=uri,
        fingerprint=fingerprint,
        url="https://codeshare.frida.re/@{}".format(uri)
    ))
    while True:
        prompt_string = "Are you sure you'd like to trust this project? [y/N] "
        response = get_input(prompt_string)
        # Empty input defaults to "no".
        if response.lower() in ('n', 'no') or response == '':
            return None
        if response.lower() in ('y', 'yes'):
            self._print(
                "Adding fingerprint {} to the trust store! You won't be prompted again unless the code changes.".format(
                    fingerprint))
            script = response_json['source']
            self._update_truststore({
                uri: fingerprint
            })
            return script
import random import string from time import sleep try: from urllib.parse import urlparse except ImportError: from urlparse import urlparse import socket import requests import urllib3 import yaml urllib3.disable_warnings() HERE = os.path.dirname(os.path.normpath(__file__)) _PLATFORM = platform.platform().lower() PLATFORM = "windows" if ("windows" in _PLATFORM or "cygwin" in _PLATFORM) else "linux" HOSTNAME = os.uname()[1] JAVA_VERSION_WHITELIST = frozenset( ("oracle:8", "openjdk:8", "openjdk:9", "openjdk:11")) roleNames = [ 'splunk_cluster_master', # (if it exists, set up indexer clustering) 'splunk_deployer', 'splunk_heavy_forwarder', 'splunk_standalone', 'splunk_search_head', 'splunk_indexer', 'splunk_license_master', # (if it exists, run adding license with a license master) 'splunk_search_head_captain', # TODO: remove this as we deprecate this role
for filename in fileList: if ".dcm" in filename.lower(): actDs = dicom.read_file(os.path.join(dirName, filename)) actZPos = actDs.ImagePositionPatient[2] if actZPos < minZPos: minZPos = actZPos return [ actDs.ImagePositionPatient[0], actDs.ImagePositionPatient[1], minZPos ] # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # if platform.platform()[0] == "W": print("OS: win") pathIn = "c:/users/vch/desktop/Bredies/CASE{}/".format(txtCase) pathOut = "c:/users/vch/desktop/results/CASE{}/".format(txtCase) else: print("OS: not win") pathIn = "/home/horakv/Desktop/Bredies/CASE{}/".format(txtCase) pathOut = "/home/horakv/Desktop/results/CASE{}/".format(txtCase) (listOfDicomArrays, listOfPixelDims, listOfPixelSpacings, listOfPlaneShapes, listOfMaxCounts, listOfMatrices) = readFilesToDicomArray(pathIn + "cine/", pathOut) if withCushion: actVec = np.loadtxt("{}withCushion/avg_{}/path_{}.txt".format( pathOut, numAvg, pathToDisplay),
"LMU": "http://erde.geophysik.uni-muenchen.de", "NCEDC": "http://service.ncedc.org", "NIEP": "http://eida-sc3.infp.ro", "NOA": "http://eida.gein.noa.gr", "ODC": "http://www.orfeus-eu.org", "ORFEUS": "http://www.orfeus-eu.org", "RESIF": "http://ws.resif.fr", "SCEDC": "http://service.scedc.caltech.edu", "USGS": "http://earthquake.usgs.gov", "USP": "http://sismo.iag.usp.br", } FDSNWS = ("dataselect", "event", "station") if PY2: platform_ = platform.platform().decode("ascii", "ignore") else: encoding = sys.getdefaultencoding() or "UTF-8" platform_ = platform.platform().encode(encoding).decode("ascii", "ignore") # The default User Agent that will be sent with every request. DEFAULT_USER_AGENT = "ObsPy/%s (%s, Python %s)" % (__version__, platform_, platform.python_version()) # The default parameters. Different services can choose to add more. It always # contains the long name first and the short name second. If it has no short # name, it is simply a tuple with only one entry. DEFAULT_DATASELECT_PARAMETERS = [ "starttime", "endtime", "network", "station", "location", "channel" ] OPTIONAL_DATASELECT_PARAMETERS = ["quality", "minimumlength", "longestonly"]
def main():
    """Exercise the smart-card resource manager: list interfaces for a
    platform-dependent known card, then introduce, enumerate and forget a
    custom card type.

    Raises on any SCard API failure; always releases the context.
    """
    hresult, hcontext = SCardEstablishContext(SCARD_SCOPE_USER)
    if hresult != SCARD_S_SUCCESS:
        raise scard.error(
            'Failed to establish context: ' + \
            SCardGetErrorMessage(hresult))
    print('Context established!')

    try:
        # list interfaces for a known card; the expected card name depends on
        # which Windows release (or other OS) we are running on
        if -1 != platform.platform().find('Windows-7'):
            expectedCard = 'Identity Device (Microsoft Generic Profile)'
        elif -1 != platform.platform().find('Windows-Vista-6.0'):
            expectedCard = 'Axalto Cryptoflex .NET'
        else:
            expectedCard = 'Schlumberger Cryptoflex 8k v2'
        hresult, interfaces = SCardListInterfaces(hcontext, expectedCard)
        if hresult != SCARD_S_SUCCESS:
            raise scard.error(
                'Failed to list interfaces: ' + \
                SCardGetErrorMessage(hresult))
        print('Interfaces for ', expectedCard, ':', interfaces)

        # introduce a card (forget first in case it is already present)
        hresult = SCardForgetCardType(hcontext, znewcardName)
        print('Introducing card ' + znewcardName)
        hresult = SCardIntroduceCardType(
            hcontext, znewcardName, znewcardPrimGuid,
            znewcardPrimGuid + znewcardSecGuid, znewcardATR, znewcardMask)
        if hresult != SCARD_S_SUCCESS:
            # NOTE(review): `error` vs `scard.error` is inconsistent in this
            # function; both spellings are kept as found -- confirm which name
            # is actually in scope.
            raise error(
                'Failed to introduce card type: ' + \
                SCardGetErrorMessage(hresult))

        # list card interfaces
        hresult, interfaces = SCardListInterfaces(hcontext, znewcardName)
        if hresult != SCARD_S_SUCCESS:
            raise error(
                'Failed to list interfaces: ' + \
                SCardGetErrorMessage(hresult))
        for i in interfaces:
            print('Interface for ' + znewcardName + ' :', \
                smartcard.guid.GUIDToStr(i))

        print('Forgeting card ' + znewcardName)
        hresult = SCardForgetCardType(hcontext, znewcardName)
        if hresult != SCARD_S_SUCCESS:
            raise error(
                'Failed to remove card type: ' + \
                SCardGetErrorMessage(hresult))

    finally:
        hresult2 = SCardReleaseContext(hcontext)
        if hresult2 != SCARD_S_SUCCESS:
            # BUG FIX: the original formatted the *previous* call's result
            # (hresult) into this message; report the release failure itself.
            raise error(
                'Failed to release context: ' + \
                SCardGetErrorMessage(hresult2))
        print('Released context.')
def normalize_path(path: str) -> str:
    """Convert *path* with ``wslpath`` when running inside WSL.

    WSL kernels report both "linux" and "microsoft" in the platform string;
    anywhere else the path is returned untouched.
    """
    plat = platform.platform().lower()
    running_under_wsl = ('linux' in plat) and ('microsoft' in plat)
    if not running_under_wsl:
        return path
    converted = subprocess.check_output(['wslpath', path], text=True)
    return converted.strip()
# Keys used when recording best-checkpoint metrics.
BEST_TRAIN_F1_SCORE_KEY = 'train_f1_score'
BEST_EVAL_F1_SCORE_KEY = 'eval_f1_score'
BEST_TRAIN_ACC_KEY = 'train_acc'
BEST_EVAL_ACC_KEY = 'eval_acc'
BEST_CKPT_DIR_NAME = 'best_ckpts'

# Keys describing the model's expected input geometry.
MODEL_INPUT_WIDTH_KEY = 'MODEL_INPUT_WIDTH'
MODEL_INPUT_HEIGHT_KEY = 'MODEL_INPUT_HEIGHT'
MODEL_INPUT_CHANNELS_KEY = 'MODEL_INPUT_CHANNELS'

# Image file extensions accepted when scanning dataset directories.
ALLOWED_EXTENSIONS = [
    '.jpg', '.JPG', '.jpeg', '.JPEG', '.png', '.PNG', '.bmp', '.BMP'
]
# On Windows (platform string starting with 'W') file matching is
# case-insensitive, so the upper-case duplicates are dropped.
# NOTE(review): assumes `platform` is the *function* from the platform module
# (``from platform import platform``) -- confirm the import; testing only the
# first character of the platform string is fragile.
if platform()[0] == 'W':
    ALLOWED_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.bmp']

BUTS_DIR_NAME = 'BUTS'

# Keys of the JSON annotation schema produced by the labelling tool.
JSON_ANNOTATION_DIR = 'ForTrainingAnnotations'
JSON_ANNOT_KEY = 'Annotations'
JSON_LABEL_KEY = 'Label'
JSON_TOP_LEFT_KEY = 'PointTopLeft'
JSON_TOP_RIGHT_KEY = 'PointTopRight'
JSON_BOTTOM_LEFT_KEY = 'PointBottomLeft'
JSON_BOTTOM_RIGHT_KEY = 'PointBottomRight'
JSON_IS_BAD_IMAGE_KEY = 'IsBadImage'
TFR_IMPLICIT_DIR_NAME = 'TFRECORDS_DATASET'
def main():
    """Render three textured, optionally rotating cubes and a hidden
    colour-picking pass into an offscreen framebuffer.

    NOTE(review): relies on module-level globals defined elsewhere in the
    file: `shader`, callbacks (`window_resize_clb`, `mouse_pos_clb`,
    `mouse_button_callback`), rotation flags (`red_rot`, `green_rot`,
    `blue_rot`), `picker`, `pick()` and `load_texture()` -- confirm.
    """
    global shader

    # Vertex shader: standard MVP transform, forwards the UV to the fragment stage.
    vertex_src = """ # version 330 layout(location = 0) in vec3 a_position; layout(location = 1) in vec2 a_texture; uniform mat4 model; uniform mat4 projection; uniform mat4 view; out vec2 v_texture; void main() { gl_Position = projection * view * model * vec4(a_position, 1.0); v_texture = a_texture; } """

    # Fragment shader: switcher==0 draws textured; otherwise a flat integer
    # colour used by the picking pass.
    fragment_src = """ # version 330 in vec2 v_texture; out vec4 out_color; uniform sampler2D s_texture; uniform ivec3 icolor; uniform int switcher; void main() { if(switcher == 0){ out_color = texture(s_texture, v_texture); }else{ out_color = vec4(icolor.r/255.0, icolor.g/255.0, icolor.b/255.0, 1.0); } } """

    # initializing glfw library
    if not glfw.init():
        raise Exception("glfw can not be initialized!")

    # Request GL 4.6 on Windows, 4.1 elsewhere (e.g. the macOS core-profile cap).
    if "Windows" in pltf.platform():
        glfw.window_hint(glfw.CONTEXT_VERSION_MAJOR, 4)
        glfw.window_hint(glfw.CONTEXT_VERSION_MINOR, 6)
        glfw.window_hint(glfw.OPENGL_PROFILE, glfw.OPENGL_CORE_PROFILE)
        glfw.window_hint(glfw.OPENGL_FORWARD_COMPAT, True)
    else:
        glfw.window_hint(glfw.CONTEXT_VERSION_MAJOR, 4)
        glfw.window_hint(glfw.CONTEXT_VERSION_MINOR, 1)
        glfw.window_hint(glfw.OPENGL_PROFILE, glfw.OPENGL_CORE_PROFILE)
        glfw.window_hint(glfw.OPENGL_FORWARD_COMPAT, True)

    # creating the window
    window = glfw.create_window(1280, 720, "My OpenGL window", None, None)

    # check if window was created
    if not window:
        glfw.terminate()
        raise Exception("glfw window can not be created!")

    # set window's position
    glfw.set_window_pos(window, 100, 100)

    # set the callback function for window resize
    glfw.set_window_size_callback(window, window_resize_clb)
    # set the mouse position callback
    glfw.set_cursor_pos_callback(window, mouse_pos_clb)
    # set the button press position callback
    glfw.set_mouse_button_callback(window, mouse_button_callback)

    # make the context current
    glfw.make_context_current(window)

    # 24 vertices (4 per face), interleaved as x, y, z, u, v.
    cube_buffer = [-0.5, -0.5, 0.5, 0.0, 0.0,
                   0.5, -0.5, 0.5, 1.0, 0.0,
                   0.5, 0.5, 0.5, 1.0, 1.0,
                   -0.5, 0.5, 0.5, 0.0, 1.0,
                   -0.5, -0.5, -0.5, 0.0, 0.0,
                   0.5, -0.5, -0.5, 1.0, 0.0,
                   0.5, 0.5, -0.5, 1.0, 1.0,
                   -0.5, 0.5, -0.5, 0.0, 1.0,
                   0.5, -0.5, -0.5, 0.0, 0.0,
                   0.5, 0.5, -0.5, 1.0, 0.0,
                   0.5, 0.5, 0.5, 1.0, 1.0,
                   0.5, -0.5, 0.5, 0.0, 1.0,
                   -0.5, 0.5, -0.5, 0.0, 0.0,
                   -0.5, -0.5, -0.5, 1.0, 0.0,
                   -0.5, -0.5, 0.5, 1.0, 1.0,
                   -0.5, 0.5, 0.5, 0.0, 1.0,
                   -0.5, -0.5, -0.5, 0.0, 0.0,
                   0.5, -0.5, -0.5, 1.0, 0.0,
                   0.5, -0.5, 0.5, 1.0, 1.0,
                   -0.5, -0.5, 0.5, 0.0, 1.0,
                   0.5, 0.5, -0.5, 0.0, 0.0,
                   -0.5, 0.5, -0.5, 1.0, 0.0,
                   -0.5, 0.5, 0.5, 1.0, 1.0,
                   0.5, 0.5, 0.5, 0.0, 1.0]
    cube_buffer = np.array(cube_buffer, dtype=np.float32)

    # Two triangles per face, 6 faces.
    cube_indices = [0, 1, 2, 2, 3, 0,
                    4, 5, 6, 6, 7, 4,
                    8, 9, 10, 10, 11, 8,
                    12, 13, 14, 14, 15, 12,
                    16, 17, 18, 18, 19, 16,
                    20, 21, 22, 22, 23, 20]
    cube_indices = np.array(cube_indices, dtype=np.uint32)

    # VAO and VBO
    VAO = glGenVertexArrays(1)
    VBO = glGenBuffers(1)
    EBO = glGenBuffers(1)

    # Cube VAO
    glBindVertexArray(VAO)

    shader = compileProgram(compileShader(vertex_src, GL_VERTEX_SHADER),
                            compileShader(fragment_src, GL_FRAGMENT_SHADER))

    # Cube Vertex Buffer Object
    glBindBuffer(GL_ARRAY_BUFFER, VBO)
    glBufferData(GL_ARRAY_BUFFER, cube_buffer.nbytes, cube_buffer, GL_STATIC_DRAW)

    # Cube Element Buffer Object
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO)
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, cube_indices.nbytes, cube_indices, GL_STATIC_DRAW)

    # Cube vertices: 3 floats of position at offset 0 within a 5-float stride.
    glEnableVertexAttribArray(0)
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, cube_buffer.itemsize * 5, ctypes.c_void_p(0))
    # Cube textures: 2 floats of UV at byte offset 12.
    glEnableVertexAttribArray(1)
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, cube_buffer.itemsize * 5, ctypes.c_void_p(12))

    textures = glGenTextures(3)
    crate = load_texture("textures/crate.jpg", textures[0])
    metal = load_texture("textures/metal.jpg", textures[1])
    brick = load_texture("textures/brick.jpg", textures[2])

    # picking texture and a frame buffer object (window-sized colour attachment)
    pick_texture = glGenTextures(1)
    glBindTexture(GL_TEXTURE_2D, pick_texture)
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 1280, 720, 0, GL_RGB, GL_FLOAT, None)
    FBO = glGenFramebuffers(1)
    glBindFramebuffer(GL_FRAMEBUFFER, FBO)
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, pick_texture, 0)
    glBindFramebuffer(GL_FRAMEBUFFER, 0)
    glBindTexture(GL_TEXTURE_2D, 0)

    glUseProgram(shader)

    glClearColor(0, 0.1, 0.1, 1)
    glEnable(GL_DEPTH_TEST)
    glEnable(GL_BLEND)
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)

    projection = pyrr.matrix44.create_perspective_projection_matrix(45, 1280 / 720, 0.1, 100)
    view = pyrr.matrix44.create_from_translation(pyrr.Vector3([0.0, 0.0, -4.0]))

    cube_positions = [(-2.0, 0.0, 0.0), (0.0, 0.0, 0.0), (2.0, 0.0, 0.0)]
    # One unique flat colour per cube so the pick pass can identify which was hit.
    pick_colors = [(255, 0, 0), (0, 255, 0), (0, 0, 255)]

    model_loc = glGetUniformLocation(shader, "model")
    proj_loc = glGetUniformLocation(shader, "projection")
    view_loc = glGetUniformLocation(shader, "view")
    icolor_loc = glGetUniformLocation(shader, "icolor")
    switcher_loc = glGetUniformLocation(shader, "switcher")

    # Projection and view are constant for the whole run; upload them once.
    glUniformMatrix4fv(proj_loc, 1, GL_FALSE, projection)
    glUniformMatrix4fv(view_loc, 1, GL_FALSE, view)
    glUseProgram(0)
    glClearColor(0, 0.1, 0.1, 1)

    # the main application loop
    while not glfw.window_should_close(window):
        glfw.poll_events()
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)

        rot_y = pyrr.Matrix44.from_y_rotation(glfw.get_time() * 2)

        glUseProgram(shader)
        # draw to the default frame buffer
        glUniform1i(switcher_loc, 0)
        for i in range(len(cube_positions)):
            model = pyrr.matrix44.create_from_translation(cube_positions[i])
            if i == 0:
                glBindTexture(GL_TEXTURE_2D, crate)
                if red_rot:
                    glUniformMatrix4fv(model_loc, 1, GL_FALSE, rot_y @ model)
                else:
                    glUniformMatrix4fv(model_loc, 1, GL_FALSE, model)
            elif i == 1:
                glBindTexture(GL_TEXTURE_2D, metal)
                if green_rot:
                    glUniformMatrix4fv(model_loc, 1, GL_FALSE, rot_y @ model)
                else:
                    glUniformMatrix4fv(model_loc, 1, GL_FALSE, model)
            else:
                glBindTexture(GL_TEXTURE_2D, brick)
                if blue_rot:
                    glUniformMatrix4fv(model_loc, 1, GL_FALSE, rot_y @ model)
                else:
                    glUniformMatrix4fv(model_loc, 1, GL_FALSE, model)
            glDrawElements(GL_TRIANGLES, len(cube_indices), GL_UNSIGNED_INT, None)

        # draw to the custom frame buffer object - pick buffer
        glUniform1i(switcher_loc, 1)
        glBindFramebuffer(GL_FRAMEBUFFER, FBO)
        glClearColor(0.0, 0.0, 0.0, 1.0)
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        for i in range(len(cube_positions)):
            pick_model = pyrr.matrix44.create_from_translation(cube_positions[i])
            glUniform3iv(icolor_loc, 1, pick_colors[i])
            glUniformMatrix4fv(model_loc, 1, GL_FALSE, pick_model)
            glDrawElements(GL_TRIANGLES, len(cube_indices), GL_UNSIGNED_INT, None)
        if picker:
            pick()
        glBindFramebuffer(GL_FRAMEBUFFER, 0)
        glUseProgram(0)

        glfw.swap_buffers(window)

    # terminate glfw, free up allocated resources
    glfw.terminate()
include_dirs = ['ffld2', 'ffld2/lib'] library_dirs = [] extra_compile_args = [] extra_link_args = [] libraries = [] sources = ([ 'cyffld2/ffld2/HOGPyramid.cpp', 'cyffld2/ffld2/JPEGImage.cpp', 'cyffld2/ffld2/LBFGS.cpp', 'cyffld2/ffld2/Model.cpp', 'cyffld2/ffld2/Object.cpp', 'cyffld2/ffld2/Patchwork.cpp', 'cyffld2/ffld2/Rectangle.cpp', 'cyffld2/ffld2/Scene.cpp', 'cyffld2/ffld2/Mixture.cpp', 'cyffld2/ffld2/lib/ffld2.cpp', 'cyffld2/_ffld2.pyx' ]) _platform = platform.platform().lower() if 'linux' in _platform: extra_compile_args += ['-fopenmp'] extra_link_args += ['-fopenmp'] libraries += ['xml2', 'fftw3f', 'jpeg'] elif 'darwin' in _platform: libraries += ['xml2', 'fftw3f', 'jpeg'] elif 'windows' in _platform: extra_compile_args += ['/openmp'] extra_link_args += ['/openmp'] if os.environ.get('CONDA_BUILD', None): # There is a bug in visual studio 2008 whereby you can't pass non-aligned # values, therefore, we have to turn off alignment enforcement # for eigen on Win32. See http://eigen.tuxfamily.org/bz/show_bug.cgi?id=83
def __init__(self, picard_args, unparsed_args, localedir, autoupdate):
    """Initialise the Picard application object: Qt setup, config, logging,
    signal handling, web services, fingerprinting, plugins and main window.

    NOTE(review): ordering matters throughout (config before logging before
    gettext before config upgrade); do not reorder.
    """
    # Use the new fusion style from PyQt5 for a modern and consistent look
    # across all OSes.
    self.setStyle('Fusion')

    # Set the WM_CLASS to 'MusicBrainz-Picard' so desktop environments
    # can use it to look up the app
    super().__init__(['MusicBrainz-Picard'] + unparsed_args)
    self.__class__.__instance = self
    config._setup(self, picard_args.config_file)

    # Allow High DPI Support
    self.setAttribute(QtCore.Qt.AA_UseHighDpiPixmaps)
    self.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling)

    super().setStyleSheet(
        'QGroupBox::title { /* PICARD-1206, Qt bug workaround */ }')

    self._cmdline_files = picard_args.FILE
    self._autoupdate = autoupdate
    self._no_restore = picard_args.no_restore
    # Debug logging is enabled via CLI flag, env var or saved verbosity setting.
    self.debug(picard_args.debug or "PICARD_DEBUG" in os.environ
               or config.setting['log_verbosity'] == logging.DEBUG)

    # FIXME: Figure out what's wrong with QThreadPool.globalInstance().
    # It's a valid reference, but its start() method doesn't work.
    self.thread_pool = QtCore.QThreadPool(self)

    # Use a separate thread pool for file saving, with a thread count of 1,
    # to avoid race conditions in File._save_and_rename.
    self.save_thread_pool = QtCore.QThreadPool(self)
    self.save_thread_pool.setMaxThreadCount(1)

    if not sys.platform == "win32":
        # Set up signal handling
        # It's not possible to call all available functions from signal
        # handlers, therefore we need to set up a QSocketNotifier to listen
        # on a socket. Sending data through a socket can be done in a
        # signal handler, so we use the socket to notify the application of
        # the signal.
        # This code is adopted from
        # https://qt-project.org/doc/qt-4.8/unix-signals.html

        # To not make the socket module a requirement for the Windows
        # installer, import it here and not globally
        import socket
        self.signalfd = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM, 0)

        self.signalnotifier = QtCore.QSocketNotifier(
            self.signalfd[1].fileno(), QtCore.QSocketNotifier.Read, self)
        self.signalnotifier.activated.connect(self.sighandler)

        signal.signal(signal.SIGHUP, self.signal)
        signal.signal(signal.SIGINT, self.signal)
        signal.signal(signal.SIGTERM, self.signal)

    # Setup logging
    log.debug("Starting Picard from %r", os.path.abspath(__file__))
    log.debug("Platform: %s %s %s", platform.platform(),
              platform.python_implementation(), platform.python_version())
    log.debug("Versions: %s", versions.as_string())
    log.debug("Configuration file path: %r", config.config.fileName())

    log.debug("User directory: %r", os.path.abspath(USER_DIR))

    # for compatibility with pre-1.3 plugins
    QtCore.QObject.tagger = self
    QtCore.QObject.config = config
    QtCore.QObject.log = log

    check_io_encoding()

    # Must be before config upgrade because upgrade dialogs need to be
    # translated
    setup_gettext(localedir, config.setting["ui_language"], log.debug)

    upgrade_config()

    self.webservice = WebService()
    self.mb_api = MBAPIHelper(self.webservice)
    self.acoustid_api = AcoustIdAPIHelper(self.webservice)

    load_user_collections()

    # Initialize fingerprinting
    self._acoustid = acoustid.AcoustIDClient()
    self._acoustid.init()

    # Load plugins
    self.pluginmanager = PluginManager()
    if hasattr(sys, "frozen"):
        # Frozen (installer) build: plugins live next to the executable.
        self.pluginmanager.load_plugindir(
            os.path.join(os.path.dirname(sys.argv[0]), "plugins"))
    else:
        mydir = os.path.dirname(os.path.abspath(__file__))
        self.pluginmanager.load_plugindir(os.path.join(mydir, "plugins"))

    if not os.path.exists(USER_PLUGIN_DIR):
        os.makedirs(USER_PLUGIN_DIR)
    self.pluginmanager.load_plugindir(USER_PLUGIN_DIR)
    self.pluginmanager.query_available_plugins()

    self.acoustidmanager = AcoustIDManager()
    self.browser_integration = BrowserIntegration()

    # In-memory state for the loaded music collection.
    self.files = {}
    self.clusters = ClusterList()
    self.albums = {}
    self.release_groups = {}
    self.mbid_redirects = {}
    self.unclustered_files = UnclusteredFiles()
    self.nats = None
    self.window = MainWindow()
    self.exit_cleanup = []
    self.stopping = False

    # Load release version information
    self.updatecheckmanager = UpdateCheckManager(parent=self.window)
def version():
    """Return a string with various version information."""
    out = ["qutebrowser v{}".format(qutebrowser.__version__)]
    commit = _git_str()
    if commit is not None:
        out.append("Git commit: {}".format(commit))
    out.append("Backend: {}".format(_backend()))

    # Interpreter / toolkit versions.
    out.append('')
    out.append('{}: {}'.format(platform.python_implementation(),
                               platform.python_version()))
    out.append('Qt: {}'.format(earlyinit.qt_version()))
    out.append('PyQt: {}'.format(PYQT_VERSION_STR))
    out.append('')

    out.extend(_module_versions())

    ssl_version = (QSslSocket.sslLibraryVersionString()
                   if QSslSocket.supportsSsl() else 'no')
    out.append('pdf.js: {}'.format(_pdfjs_version()))
    out.append('sqlite: {}'.format(sql.version()))
    out.append('QtNetwork SSL: {}\n'.format(ssl_version))

    qapp = QApplication.instance()
    if qapp:
        style = qapp.style()
        out.append('Style: {}'.format(style.metaObject().className()))

    importpath = os.path.dirname(os.path.abspath(qutebrowser.__file__))

    out.append('Platform: {}, {}'.format(platform.platform(),
                                         platform.architecture()[0]))
    dist = distribution()
    if dist is not None:
        out.append('Linux distribution: {} ({})'.format(dist.pretty,
                                                        dist.parsed.name))

    out.append('Frozen: {}'.format(hasattr(sys, 'frozen')))
    out.append("Imported from {}".format(importpath))
    out.append("Using Python from {}".format(sys.executable))
    out.append("Qt library executable path: {}, data path: {}".format(
        QLibraryInfo.location(QLibraryInfo.LibraryExecutablesPath),
        QLibraryInfo.location(QLibraryInfo.DataPath)))

    # Fall back to generic OS info when the distribution is unknown.
    if not dist or dist.parsed == Distribution.unknown:
        out.extend(_os_info())

    out.append('')
    out.append('Paths:')
    for name, path in sorted(_path_info().items()):
        out.append('{}: {}'.format(name, path))

    out.append('')
    out.append('Autoconfig loaded: {}'.format(_autoconfig_loaded()))
    out.append('Config.py: {}'.format(_config_py_loaded()))
    out.append('Uptime: {}'.format(_uptime()))

    return '\n'.join(out)
e_host.place(x=110, y=10) ############################################################################### name = getpass.getuser() # get username # win32net.NetGroupAdd(None, 0, 'TMPGroup') # win32net.NetGroupAddUser(None, 'TMPGroup', name) print(win32net.NetGroupGetInfo(None, 'TMPGroup', 0)) Label(root, text='Name of user:'******'Information:').place(x=10, y=90) e_info = Entry(root, width=50) e_info.insert(0, info) e_info.place(x=110, y=90) ############################################################################### ############################################################################### # root.mainloop()
def __init__(self, fp=None, *args, **kwargs):
    """Initialise the config parser from *fp* (or the default conf_path),
    overlaying any keyword arguments onto the class defaults.

    BUG FIX: the original called ``self.defaults.update(kwargs)``, which —
    since ``defaults`` is looked up before any instance assignment — mutated
    the shared class-level dict, leaking one instance's kwargs into every
    other instance. Merge into a per-instance copy instead.
    """
    merged = dict(self.defaults)
    merged.update(kwargs)
    self.defaults = merged  # instance attribute shadows the class default
    SafeConfigParser.__init__(self, merged)
    # Record the host platform string for later platform-specific lookups.
    self.pf = platform.platform()
    self.fp = fp or self.conf_path
    self.read(self.fp)
def write_parsed_to_csv(page_url, map_info, writer, pscores, page_number=2, web_driver=None):
    """Given the current page URL, extract the information from each apartment in the list.

    Scrapes one listing page with Selenium + BeautifulSoup, writes one CSV row
    per apartment, then clicks through to the next page and recurses until no
    next-page link exists.

    NOTE(review): `xrange` below suggests this is Python 2 code; the driver is
    only quit on the terminating exception path -- confirm callers do not
    expect the passed-in driver to survive.
    """
    # We start on page_number = 2, since we will already be parsing page_number 1
    # if we are loading the page for the first time, we want to initailize the web driver
    if (web_driver != None):
        driver = web_driver
    else:
        options = Options()
        options.headless = True
        # Debian ships Firefox as firefox-esr, so point geckodriver at it.
        if ('debian' in platform.platform()):
            driver = webdriver.Firefox(firefox_binary='/usr/bin/firefox-esr', options=options)
        else:
            driver = webdriver.Firefox(options=options)
        driver.get(page_url)

    # read the current page
    soup = BeautifulSoup(driver.page_source, 'html.parser')  # soupify the current page
    soup.prettify()

    # only look in this region
    soup = soup.find('div', class_='placardContainer')

    # append the current apartments to the list
    for item in soup.find_all('article', class_='placard'):
        url = ''
        rent = ''
        contact = ''

        # Listings without a title link are ads/placeholders; skip them.
        if item.find('a', class_='placardTitle') is None:
            continue
        url = item.find('a', class_='placardTitle').get('href')

        # get the rent and parse it to unicode
        obj = item.find('span', class_='altRentDisplay')
        if obj is not None:
            rent = obj.getText().strip()

        # get the phone number and parse it to unicode
        obj = item.find('div', class_='phone')
        if obj is not None:
            contact = obj.getText().strip()

        # get the other fields to write to the CSV
        fields = parse_apartment_information(url, map_info)

        # make this wiki markup
        fields['name'] = '[' + str(fields['name']) + '](' + url + ')'
        fields[
            'address'] = '[' + fields['address'] + '](' + fields['map'] + ')'

        # fill out the CSV file
        row = [
            fields['name'], contact, fields['address'], fields['size'], rent,
            fields['monthFees'], fields['onceFees'], fields['petPolicy'],
            fields['distance'], fields['duration'], fields['parking'],
            fields['gym'], fields['kitchen'], fields['amenities'],
            fields['features'], fields['space'], fields['lease'],
            fields['services'], fields['info'], fields['indoor'],
            fields['outdoor'], fields['img'], fields['description']
        ]

        # add the score fields if necessary: a default '5' after every column
        # and a trailing '0' total
        if pscores:
            for i in xrange(len(row), 0, -1):
                row.insert(i, '5')
            row.append('0')

        # write the row
        writer.writerow(row)

    page_number_str = str(page_number)
    # check for our next page number
    try:
        page_number_element = driver.find_element_by_xpath("//a[@data-page='" + page_number_str + "']")
        page_number_element.click()
        time.sleep(1)
    # we will get a no element found exception, meaning our search has come to an end
    except:
        driver.quit()
        return
    # recurse until the last page
    write_parsed_to_csv("none", map_info, writer, pscores, page_number + 1, driver)
import os
import sys
import platform

# Log basic host information at startup.
print(platform.platform())
print(platform.node())
print(platform.system())

# Project root = parent of this file's directory.  os.path.dirname already
# handles the host OS's separator, so the original per-OS string splitting
# ("\\" on Windows, "/" elsewhere) is unnecessary and has been replaced with
# the portable equivalent.
BASE_DIR = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))

sys.path.insert(0, BASE_DIR)

from core import main
from conf import setting

if __name__ == '__main__':
    obj = main.Manage_center()
    obj.run()
from picard.const import (ACOUSTID_KEY, ACOUSTID_HOST, ACOUSTID_PORT,
                          CAA_HOST, CAA_PORT)
from picard.oauth import OAuthManager
from picard.util import build_qurl

# How often (ms) the pending-request counter callbacks fire.
COUNT_REQUESTS_DELAY_MS = 250

# Minimum delay between consecutive requests, in ms, keyed by (host, port).
# Unknown hosts default to 1000 ms; AcoustID allows ~3 req/s; CAA is unthrottled.
REQUEST_DELAY = defaultdict(lambda: 1000)
REQUEST_DELAY[(ACOUSTID_HOST, ACOUSTID_PORT)] = 333
REQUEST_DELAY[(CAA_HOST, CAA_PORT)] = 0

# User-Agent sent with every web request: app identity plus OS and Python info.
USER_AGENT_STRING = '%s-%s/%s (%s;%s-%s)' % (PICARD_ORG_NAME, PICARD_APP_NAME,
                                             PICARD_VERSION_STR,
                                             platform.platform(),
                                             platform.python_implementation(),
                                             platform.python_version())
# Percent-encoded client identifier for MusicBrainz API calls.
CLIENT_STRING = string_(QUrl.toPercentEncoding('%s %s-%s' % (PICARD_ORG_NAME,
                                                             PICARD_APP_NAME,
                                                             PICARD_VERSION_STR)))


def escape_lucene_query(text):
    """Backslash-escape every Lucene query-syntax special character in *text*."""
    return re.sub(r'([+\-&|!(){}\[\]\^"~*?:\\/])', r'\\\1', text)


def _wrap_xml_metadata(data):
    """Wrap an XML fragment in the MusicBrainz mmd-2.0 metadata envelope."""
    return ('<?xml version="1.0" encoding="UTF-8"?>' +
            '<metadata xmlns="http://musicbrainz.org/ns/mmd-2.0#">%s</metadata>' % data)
def callback(resp):
    # Assert the default User-Agent header advertises the SDK version plus the
    # Python and OS versions it was built with.
    # NOTE(review): closure -- `self` and VERSION come from the enclosing test
    # scope, which is outside this view.
    self.assertIn("azsdk-python-ai-textanalytics/{} Python/{} ({})".format(
        VERSION, platform.python_version(), platform.platform()),
        resp.http_request.headers["User-Agent"]
    )
# -*- coding: utf-8 -*-
import sys
import os
from collections import namedtuple
from datetime import datetime
from html.parser import HTMLParser
from html import unescape
from hashlib import md5
from configparser import NoOptionError, ConfigParser
import threading
# noinspection PyCompatibility
import regex
import subprocess as sp
import platform

# Pick the git binding that works on the current OS: the bundled wrapper on
# Windows, the `sh` library's git elsewhere.
if 'windows' in platform.platform().lower():
    # noinspection PyPep8Naming
    from _Git_Windows import git, GitError
else:
    # noinspection PyUnresolvedReferences
    from sh.contrib import git

CommitInfo = namedtuple('CommitInfo', ['id', 'id_full', 'author', 'message'])

git_url = git.config("--get", "remote.origin.url").strip()
git_url_split = git_url.split("/")
# Default repo slug; overridden below when origin points at GitHub.
git_user_repo = "Charcoal-SE/SmokeDetector"
# Idiom fix: startswith() replaces the fragile magic-length slice
# comparison `git_url[0:19] == ...`.
if git_url.startswith("https://github.com/"):
    # Pieces 3 and 4 of the URL are "<user>/<repo>.git"; [0:-4] strips ".git".
    git_user_repo = "{}/{}".format(git_url_split[3], git_url_split[4][0:-4])
def openReportError(menuitem, reporterGUI, filename, pythonlog,
                    pythonlogandoutputs, installationlog, inputfiles,
                    outputfiles, deletedfiles, comments):
    """Assemble an error report (system info, logs, user files, comments) in a
    temporary directory and archive it as a gzipped tar at *filename*.

    NOTE(review): this looks like Python 2-era code — `platform.dist()` and
    `platform.linux_distribution()` were removed in Python 3.8 and only the
    latter is guarded by try/except; `file` shadows the py2 builtin; confirm
    the supported interpreter before modernising.
    """
    temp_directory = tempfile.mkdtemp(prefix='reporterror')
    dirname = "OOF3Dreport_" + time.strftime("%Y%m%d")
    report_directory = os.path.join(temp_directory, dirname)
    os.makedirs(report_directory)
    notesfile = open(os.path.join(report_directory, 'notes.txt'), 'w')

    # system information added to notesfile
    try:
        linux_distribution = platform.linux_distribution()
    except:
        linux_distribution = "N/A"
    notesfile.write("""System Information: Python version: %s dist: %s linux_distribution: %s system: %s machine: %s platform: %s uname: %s version: %s mac_ver: %s\n\n""" % (
        sys.version.split('\n'),
        str(platform.dist()),
        linux_distribution,
        platform.system(),
        platform.machine(),
        platform.platform(),
        platform.uname(),
        platform.version(),
        platform.mac_ver(),
    ))

    if (comments != None):
        # if the user has additional comments, add it to notes file
        notesfile.write('User Comments:\n')
        notesfile.write(comments + '\n')
        notesfile.write('\n')

    if (pythonlog):
        # add python log to temporary directory as a file
        with open(os.path.join(report_directory, 'pythonlog.txt'), 'w') as pythonlogfile:
            for line in getPythonLog().splitlines():
                pythonlogfile.write(line + '\n')
            pythonlogfile.close()

    if (pythonlogandoutputs):
        # add python log and other outputs to temporary directory as a file
        with open(os.path.join(report_directory, 'pythonlogANDoutputs.txt'), 'w') as pythonlogplusfile:
            for m in reporter.messagemanager.all_messages():
                pythonlogplusfile.write(m[0] + '\n')
            pythonlogplusfile.close()

    if (installationlog):
        # add installation log to temporary directory as a file
        logfilename = 'installation_log.txt'
        file = open(os.path.join(report_directory, logfilename), 'w')
        file.write(installationLog.logdata)
        file.close()

    # NOTE(review): `traceback` here appears to be a module-level value (a
    # file path), not the stdlib module -- confirm.
    if traceback != None and reporterGUI.tracebackButton.get_active():
        if os.path.isfile(traceback):
            shutil.copyfile(traceback,
                            os.path.join(report_directory, traceback))
            notesfile.write('Error traceback: %s\n' % traceback)

    finalnames = []  # final file names used in the report directory

    if (inputfiles != None):
        # add user input files to the temporary directory and file
        # name to notes file
        notesfile.write("Input File Names:\n")
        for files in inputfiles:
            num = 1
            message = ''
            name = files
            # checks if there is a file with the same name but
            # different path if so, a number is added to the beginning
            # of the name of the version saved in the report this
            # change is recorded in the notes file
            while (duplicate(os.path.basename(name), finalnames)):
                split = os.path.split(name)
                name = os.path.join(split[0], str(num) + split[1])
                message = '(was changed to \'' + os.path.basename(name) + '\')'
                num += 1
            finalnames.append(os.path.basename(name))
            dest = os.path.join(report_directory, os.path.basename(name))
            if os.path.isfile(files):
                shutil.copyfile(files, dest)
                notesfile.write('\'' + files + '\' ' + message + '\n')
            elif os.path.isdir(files):
                try:
                    shutil.copytree(files, dest)
                    notesfile.write('\'' + files + '\' ' + message + '\n')
                except:
                    pass
            else:
                pass
        notesfile.write('\n')

    if (outputfiles != None):
        # add user output files to the temporary directory and file
        # name to notes file
        notesfile.write("Output File Names:\n")
        for files in outputfiles:
            num = 1
            message = ''
            name = files
            # checks if there is a file with the same name but
            # different path if so, a number is added to the beginning
            # of the name of the version saved in the report this
            # change is recorded in the notes file
            while (duplicate(os.path.basename(name), finalnames)):
                split = os.path.split(name)
                name = os.path.join(split[0], str(num) + split[1])
                message = '(was changed to \'' + os.path.basename(name) + '\')'
                num += 1
            finalnames.append(os.path.basename(name))
            dest = os.path.join(report_directory, os.path.basename(name))
            if os.path.isfile(files):
                shutil.copyfile(files, dest)
                notesfile.write('\'' + files + '\' ' + message + '\n')
            elif os.path.isdir(files):
                try:
                    shutil.copytree(files, dest)
                    notesfile.write('\'' + files + '\' ' + message + '\n')
                except:
                    pass
            else:
                pass
        notesfile.write('\n')

    if (deletedfiles != None):
        # add names of input/output files that have been deleted from
        # the system to the notes file
        notesfile.write("Deleted File Names:\n")
        for files in deletedfiles:
            notesfile.write('\'' + os.path.basename(files) + '\'\n')
        notesfile.write('\n')

    notesfile.close()

    # tars the temporary directory in the location the user wants it saved
    ## Don't change the file name by adding ".tgz" or ".tar.gz".
    ## That'll just be confusing for the user.
    report_tar = tarfile.open(filename, "w:gz")
    report_tar.add(report_directory, os.path.basename(report_directory))
    report_tar.close()

    shutil.rmtree(temp_directory)  # removes the temporary directory
    reporterGUI.window.destroy()
def get_input():
    """Interactive menu loop for the P2P-CI client.

    Reads a command number, sends the corresponding protocol message over the
    already-connected server socket `soc`, and recurses for the next command.

    NOTE(review): depends on module-level state outside this view: `soc`
    (server socket), `upload_port`, and aliases `s` (socket), `p` (pickle),
    `pf` (platform).  The recursion grows the stack by one frame per command.
    """
    try:
        ip = input("Enter 1 for ADD\n2 for LIST\n3 for GET\n4 for LOOKUP\n5 to QUIT\n")
        if ip == "1":
            # ADD: register a locally held RFC file with the server.
            rfc_no = input("Enter the RFC number : ")
            rfc_title = input("Enter the RFC title : ")
            path = os.getcwd()
            filename = "rfc" + rfc_no + ".txt"
            type_of_os = pf.system()
            file_path = ""
            if type_of_os == "Windows":
                file_path = path + "\\rfc\\" + filename
            else:
                file_path = path + "/rfc/" + filename
            if not os.path.isfile(file_path):
                print(filename + " does not exist in your system")
            else:
                msg = "ADD" + " RFC " + str(rfc_no) + " P2P-CI/1.0 \n"\
                    "Host: " + str(s.gethostname()) + " (" + str(soc.getsockname()[0]) + ") \n"\
                    "Port: " + str(soc.getsockname()[1]) + "\n" "Title: " + str(rfc_title) + "\n"
                soc.send(p.dumps([msg, rfc_no, soc.getsockname()[0], upload_port, rfc_title]))
                print(soc.recv(4096).decode('utf-8'))
                get_input()
        elif ip == "2":
            # LIST: fetch and display every RFC known to the server.
            msg = "LIST ALL P2P-CI/1.0 \n"\
                "Host: " + str(s.gethostname()) + " (" + str(soc.getsockname()[0]) + ") \n"\
                "Port: " + str(soc.getsockname()[1]) + "\n"
            soc.send(p.dumps(msg))
            print(soc.recv(4096).decode('utf-8'), end="")
            print("\n")
            data = p.loads(soc.recv(4096))
            for rfc in data[0]:
                print(' '.join([rfc[r] for r in data[1]]))
            get_input()
        elif ip == "3":
            # GET: look up a peer holding the RFC, download it from that peer
            # and save it under ./rfc/.
            rfc_no = input("Enter the RFC number : ")
            rfc_title = input("Enter the RFC title : ")
            msg = "LOOKUP" + " RFC " + str(rfc_no) + " P2P-CI/1.0 \n"\
                "Host: " + str(s.gethostname()) + " (" + str(soc.getsockname()[0]) + ") \n"\
                "Port: " + str(soc.getsockname()[1]) + "\n" "Title: " + str(rfc_title) + "\n"
            soc.send(p.dumps([msg, rfc_no, "0"]))
            srvr_data = p.loads(soc.recv(4096))
            #print(srvr_data)
            if srvr_data[0]:
                # Connect directly to the first peer returned by the lookup.
                new_soc = s.socket()
                new_soc.connect((srvr_data[0]["Hostname"], int(srvr_data[0]["Port Number"])))
                type_of_os = pf.platform()
                msg = "GET RFC " + str(rfc_no) + " P2P-CI/1.0 \n"\
                    "Host: " + str(s.gethostname()) + " (" + str(soc.getsockname()[0]) + ") \n"\
                    "OS: " + str(type_of_os) + "\n"
                new_soc.send(bytes(msg, 'utf-8'))
                response = p.loads(new_soc.recv(4096))
                for x in range(len(response)):
                    print(response[x])
                #print(str(response))
                #print("\n")
                #print(response[1])
                path = os.getcwd()
                filename = "rfc" + rfc_no + ".txt"
                type_of_os = pf.system()
                if type_of_os == "Windows":
                    filename = path + "\\rfc\\" + filename
                else:
                    filename = path + "/rfc/" + filename
                with open(filename, 'w') as f:
                    f.write(response[1])
                new_soc.close()
            else:
                print(srvr_data[1])
            get_input()
        elif ip == "4":
            # LOOKUP: show every peer that holds the requested RFC.
            rfc_no = input("Enter the RFC number : ")
            rfc_title = input("Enter the RFC title : ")
            msg = "LOOKUP" + " RFC " + str(rfc_no) + " P2P-CI/1.0 \n"\
                "Host: " + str(s.gethostname()) + " (" + str(soc.getsockname()[0]) + ") \n"\
                "Port: " + str(soc.getsockname()[1]) + "\n"\
                "Title: " + str(rfc_title) + "\n"
            soc.send(p.dumps([msg, rfc_no, "1"]))
            srvr_data = p.loads(soc.recv(4096))
            print(srvr_data[1], end="")
            print("\n")
            for rfc in srvr_data[0]:
                print(' '.join([rfc[r] for r in ['RFC Number', 'RFC Title', 'Hostname', 'Port Number']]))
            get_input()
        elif ip == "5":
            # QUIT: notify the server and close the connection.
            soc.send(p.dumps("EXIT"))
            soc.close()
        else:
            print('Incorrect Input, Enter correct option')
            get_input()
    except KeyboardInterrupt:
        # Ctrl-C: tell the server we are leaving before closing.
        print("abrupt interrupt")
        soc.send(p.dumps("EXIT"))
        soc.close()
class GlobalVars:
    """Process-wide shared state for the bot.

    Everything here is class-level — no instances are created. The class body
    also executes one-time startup configuration: reading the ``config`` /
    ``config.ci`` file, deriving DNS settings, and building the blacklister
    set. Depends on module-level names imported elsewhere in this file
    (``platform``, ``threading``, ``datetime``, ``os``, ``sys``, ``regex``,
    ``ConfigParser``, ``HTMLParser``, ``unescape``, ``git_commit_info``,
    ``git_ref``, ``git_user_repo``).
    """
    on_windows = 'windows' in platform.platform().lower()

    false_positives = []
    whitelisted_users = set()
    blacklisted_users = dict()
    blacklisted_usernames = []
    blacklisted_websites = []
    # set() with the processed version of each blacklisted number pattern.
    blacklisted_numbers = None
    # blacklisted_numbers_raw is a list with the raw patterns read from the blacklisted_numbers.txt file.
    blacklisted_numbers_raw = None
    # set() with the processed version of each watched number pattern.
    watched_numbers = None
    # watched_numbers_raw is a dict() with the keys the raw patterns, with properties
    # for the user and time the pattern was added. Insertion order represents the order of the patterns in the
    # watched_numbers.txt
    watched_numbers_raw = None
    # set() with the normalized, including deobfuscated and normalized, versions of the patterns.
    blacklisted_numbers_normalized = None
    watched_numbers_normalized = None
    # The _full versions are a dict() with key=raw pattern, with tuple with processed and normalized for each.
    # Insertion order is the order they are in within the file.
    blacklisted_numbers_full = None
    watched_numbers_full = None

    bad_keywords = []
    watched_keywords = {}
    ignored_posts = []
    auto_ignored_posts = []

    # Startup timestamp, recorded once at import time (naive UTC).
    startup_utc_date = datetime.utcnow()
    startup_utc = startup_utc_date.strftime("%H:%M:%S")
    latest_questions = []
    latest_questions_lock = threading.Lock()
    # recently_scanned_posts is not stored upon abnormal exit (exceptions, ctrl-C, etc.).
    recently_scanned_posts = {}
    recently_scanned_posts_lock = threading.Lock()
    recently_scanned_posts_retention_time = 15 * 60  # 15 minutes
    api_backoff_time = 0
    deletion_watcher = None

    not_privileged_warning = \
        "You are not a privileged user. Please see " \
        "[the privileges wiki page](https://charcoal-se.org/smokey/Privileges) for " \
        "information on what privileges are and what is expected of privileged users."

    experimental_reasons = {  # Don't widely report these
        "potentially bad keyword in answer",
        "potentially bad keyword in body",
        "potentially bad keyword in title",
        "potentially bad keyword in username",
        "potentially bad NS for domain in title",
        "potentially bad NS for domain in body",
        "potentially bad NS for domain in answer",
        "potentially bad ASN for hostname in title",
        "potentially bad ASN for hostname in body",
        "potentially bad ASN for hostname in answer",
        "potentially bad IP for hostname in title",
        "potentially bad IP for hostname in body",
        "potentially bad IP for hostname in answer",
        "potentially problematic NS configuration in title",
        "potentially problematic NS configuration in body",
        "potentially problematic NS configuration in answer",
        "toxic body detected",
        "toxic answer detected",
    }

    # HTMLParser instance with its (removed-in-newer-Python) unescape re-attached,
    # so callers can keep using parser.unescape().
    parser = HTMLParser()
    parser.unescape = unescape
    code_privileged_users = None

    # these are loaded in GlobalVars.reload()
    commit = None
    commit_with_author = None
    on_branch = None

    # Chat status messages; populated by reload() from the commit info above.
    s = ""
    s_reverted = ""
    s_norestart_blacklists = ""
    s_norestart_findspam = ""
    apiquota = -1
    bodyfetcher = None
    cookies = {}
    se_sites = []
    why_data = []
    notifications = []
    listen_to_these_if_edited = []
    multiple_reporters = []
    api_calls_per_site = {}
    reason_weights = {}
    metasmoke_ids = {}

    standby_message = ""
    standby_mode = False

    no_se_activity_scan = False
    no_deletion_watcher = False
    no_edit_watcher = False
    ignore_no_se_websocket_activity_lock = threading.Lock()
    ignore_no_se_websocket_activity = False

    api_request_lock = threading.Lock()  # Get this lock before making API requests
    apiquota_rw_lock = threading.Lock()  # Get this lock before reading/writing apiquota

    class PostScanStat:
        """ Tracking post scanning data """
        # Counters are guarded by rw_lock; always take the lock before touching them.
        num_posts_scanned = 0
        post_scan_time = 0
        rw_lock = threading.Lock()

        @staticmethod
        def add_stat(posts_scanned, scan_time):
            """ Adding post scanning data """
            with GlobalVars.PostScanStat.rw_lock:
                GlobalVars.PostScanStat.num_posts_scanned += posts_scanned
                GlobalVars.PostScanStat.post_scan_time += scan_time

        @staticmethod
        def get_stat():
            """ Getting post scanning statistics """
            # Snapshot both counters under the lock, then derive the rate from
            # the local copies (None when no time has been accumulated yet).
            with GlobalVars.PostScanStat.rw_lock:
                posts_scanned = GlobalVars.PostScanStat.num_posts_scanned
                scan_time = GlobalVars.PostScanStat.post_scan_time
            if scan_time == 0:
                posts_per_second = None
            else:
                posts_per_second = posts_scanned / scan_time
            return (posts_scanned, scan_time, posts_per_second)

        @staticmethod
        def reset_stat():
            """ Resetting post scanning data """
            with GlobalVars.PostScanStat.rw_lock:
                GlobalVars.PostScanStat.num_posts_scanned = 0
                GlobalVars.PostScanStat.post_scan_time = 0

    config_parser = ConfigParser(interpolation=None)

    # Use the real config file, except under pytest (or when it's absent),
    # where the CI config is used instead.
    if os.path.isfile('config') and "pytest" not in sys.modules:
        config_parser.read('config')
    else:
        config_parser.read('config.ci')
    config = config_parser["Config"]  # It's a collections.OrderedDict now

    site_id_dict = {}
    site_id_dict_by_id = {}
    site_id_dict_timestamp = 0
    site_id_dict_issues_into_chat_timestamp = 0
    site_id_dict_lock = threading.Lock()
    post_site_id_to_question = {}

    location = config.get("location", "Continuous Integration")

    # DNS Configuration
    # Configure resolver based on config options, or System, configure DNS Cache in
    # thread-safe cache as part of dnspython's resolver system as init options,
    # control cleanup interval based on **TIME** like a regular DNS server does.
    #
    # Explicitly defining fallback= for fallback values in bool and float getters, in order to
    # avoid IDE complaints -- tward
    dns_nameservers = config.get("dns_resolver", "system").lower()
    dns_cache_enabled = config.getboolean("dns_cache_enabled", fallback=True)
    dns_cache_interval = config.getfloat("dns_cache_cleanup_interval", fallback=300.0)

    class MSStatus:
        """ Tracking metasmoke status """
        # ms_is_up / counter are guarded by rw_lock; counter counts
        # consecutive connection failures and is cleared on success.
        ms_is_up = True
        counter = 0
        rw_lock = threading.Lock()

        @staticmethod
        def set_up():
            """ Set metasmoke status to up """
            # Private to metasmoke.py
            with GlobalVars.MSStatus.rw_lock:
                GlobalVars.MSStatus.ms_is_up = True

        @staticmethod
        def set_down():
            """ Set metasmoke status to down """
            # Private to metasmoke.py
            with GlobalVars.MSStatus.rw_lock:
                GlobalVars.MSStatus.ms_is_up = False

        @staticmethod
        def is_up():
            """ Query if metasmoke status is up """
            with GlobalVars.MSStatus.rw_lock:
                current_ms_status = GlobalVars.MSStatus.ms_is_up
            return current_ms_status

        @staticmethod
        def is_down():
            """ Query if metasmoke status is down """
            return not GlobalVars.MSStatus.is_up()

        # Why implement failed() and succeeded() here, as they will only be called in metasmoke.py?
        # Because get_failure_count() need to be exposed to global, so it is more convenient
        # to implement failed() and succeeded() here.
        @staticmethod
        def failed():
            """ Indicate a metasmoke connection failure """
            with GlobalVars.MSStatus.rw_lock:
                GlobalVars.MSStatus.counter += 1

        @staticmethod
        def succeeded():
            """ Indicate a metasmoke connection success """
            with GlobalVars.MSStatus.rw_lock:
                GlobalVars.MSStatus.counter = 0

        @staticmethod
        def get_failure_count():
            """ Get consecutive metasmoke connection failure count """
            with GlobalVars.MSStatus.rw_lock:
                failure_count = GlobalVars.MSStatus.counter
            return failure_count

        @staticmethod
        def reset_ms_status():
            """ Reset class GlobalVars.MSStatus to default values """
            with GlobalVars.MSStatus.rw_lock:
                GlobalVars.MSStatus.ms_is_up = True
                GlobalVars.MSStatus.counter = 0

    # Credentials and endpoints pulled from the config file.
    chatexchange_u = config.get("ChatExchangeU")
    chatexchange_p = config.get("ChatExchangeP")

    metasmoke_host = config.get("metasmoke_host")
    metasmoke_key = config.get("metasmoke_key")
    metasmoke_ws_host = config.get("metasmoke_ws_host")

    git_name = config.get("git_username", "SmokeDetector")
    git_email = config.get("git_useremail", "*****@*****.**")

    github_username = config.get("github_username")
    github_password = config.get("github_password")
    github_access_token = config.get("github_access_token")

    perspective_key = config.get("perspective_key")

    flovis_host = config.get("flovis_host")
    flovis = None

    # Miscellaneous
    log_time_format = config.get("log_time_format", "%H:%M:%S")

    # Blacklist privileged users from config
    # Strip everything except digits and commas before splitting into IDs.
    se_blacklisters = regex.sub(r"[^\d,]", "", config.get("se_blacklisters", "")).split(",")
    mse_blacklisters = regex.sub(r"[^\d,]", "", config.get("mse_blacklisters", "")).split(",")
    so_blacklisters = regex.sub(r"[^\d,]", "", config.get("so_blacklisters", "")).split(",")

    # Create a set of blacklisters equivalent to what's used in code_privileged_users.
    config_blacklisters = set()
    for id in se_blacklisters:
        if id:
            config_blacklisters.add(("stackexchange.com", int(id)))

    for id in mse_blacklisters:
        if id:
            config_blacklisters.add(("meta.stackexchange.com", int(id)))

    for id in so_blacklisters:
        if id:
            config_blacklisters.add(("stackoverflow.com", int(id)))

    # If the config has it, get a list of the detection reasons which are considered valid.
    # The list is semicolon separated.
    valid_detection_reasons = config.get("valid_detection_reasons", None)
    if valid_detection_reasons is not None:
        valid_detection_reasons = valid_detection_reasons.split(";")

    # If the config has it, get a list of the detection IDs which are considered valid.
    # The list is semicolon separated.
    valid_rule_ids = config.get("valid_rule_ids", None)
    if valid_rule_ids is not None:
        valid_rule_ids = valid_rule_ids.split(";")

    # environ_or_none replaced by os.environ.get (essentially dict.get)
    bot_name = os.environ.get("SMOKEDETECTOR_NAME", git_name)
    bot_repo_slug = os.environ.get("SMOKEDETECTOR_REPO", git_user_repo)
    bot_repository = "//github.com/{}".format(bot_repo_slug)
    chatmessage_prefix = "[{}]({})".format(bot_name, bot_repository)

    # Known-good text used to sanity-check blacklist/watchlist patterns:
    # nothing in it should ever be caught.
    valid_content = """This is a totally valid post that should never be caught. Any blacklist or watchlist item that triggers on this item should be avoided. java.io.BbbCccDddException: nothing wrong found. class Safe { perfect valid code(int float &#%$*v a b c =+ /* - 0 1 2 3 456789.EFGQ} English 中文Français Español Português Italiano Deustch ~@#%*-_/'()?!:;" vvv kkk www sss ttt mmm absolute std::adjacent_find (power).each do |s| bbb end ert zal l gsopsq kdowhs@ xjwk* %_sooqmzb xjwpqpxnf. Please don't blacklist disk-partition.com, it's a valid domain (though it also gets spammed rather frequently)."""  # noqa: E501

    @classmethod
    def reload(cls):
        """Refresh commit/branch info and rebuild the chat status messages
        that embed it (``s``, ``s_reverted``, ``s_norestart_*``,
        ``standby_message``)."""
        cls.commit = commit = git_commit_info()

        cls.commit_with_author = "`{}` ({}: {})".format(
            commit.id, commit.author, commit.message)

        # We don't want to escape `[` and `]` when they are within code.
        # Splitting on ` yields alternating outside-code/inside-code segments;
        # only the even (outside-code) segments get escaped.
        split_commit_with_author = cls.commit_with_author.split('`')
        split_length = len(split_commit_with_author)
        for index in range(0, split_length, 2):
            split_commit_with_author[index] = split_commit_with_author[
                index].replace('[', '\\[').replace(']', '\\]')
        # There's not an even number of ` characters, so the parsing hack failed, but we assume the last one needs
        # escaping.
        if not split_length % 2:
            split_commit_with_author[-1] = split_commit_with_author[
                -1].replace('[', '\\[').replace(']', '\\]')
        cls.commit_with_author_escaped = '`'.join(split_commit_with_author)

        cls.on_branch = git_ref()
        cls.s = "[ {} ] SmokeDetector started at [rev {}]({}/commit/{}) (running on {}, Python {})".format(
            cls.chatmessage_prefix, cls.commit_with_author_escaped, cls.bot_repository,
            cls.commit.id, cls.location, platform.python_version())
        cls.s_reverted = \
            "[ {} ] SmokeDetector started in [reverted mode](" \
            "https://charcoal-se.org/smokey/SmokeDetector-Statuses#reverted-mode) " \
            "at [rev {}]({}/commit/{}) (running on {})".format(
                cls.chatmessage_prefix, cls.commit_with_author_escaped, cls.bot_repository,
                cls.commit.id, cls.location)
        cls.s_norestart_blacklists = \
            "[ {} ] Blacklists reloaded at [rev {}]({}/commit/{}) (running on {})".format(
                cls.chatmessage_prefix, cls.commit_with_author_escaped, cls.bot_repository,
                cls.commit.id, cls.location)
        cls.s_norestart_findspam = \
            "[ {} ] FindSpam module reloaded at [rev {}]({}/commit/{}) (running on {})".format(
                cls.chatmessage_prefix, cls.commit_with_author_escaped, cls.bot_repository,
                cls.commit.id, cls.location)
        cls.standby_message = \
            "[ {} ] SmokeDetector started in [standby mode](" \
            "https://charcoal-se.org/smokey/SmokeDetector-Statuses#standby-mode) " \
            "at [rev {}]({}/commit/{}) (running on {})".format(
                cls.chatmessage_prefix, cls.commit_with_author_escaped, cls.bot_repository,
                cls.commit.id, cls.location)