def setUp(self):
    print()
    print("- " * 35)
    print(self._testMethodName)
    print("- " * 35)
    print()
    self.test_name = self._testMethodName[5:]
    self.setupClass()
    print("--v-v-- begin setUp for %s --v-v--" % self.test_name)
    self.calcPaths()
    self.scmlogs = ScmInvocationLogs(self.scm, self.test_dir)
    self.scmlogs.next('fixtures')
    self.initDirs()
    self.fixtures = self.fixtures_class(self.test_dir, self.scmlogs)
    self.fixtures.setup()
    self.scmlogs.next('start-test')
    self.scmlogs.annotate('Starting %s test' % self.test_name)
    os.putenv('CACHEDIRECTORY', self.cachedir)
    # osc launches source services with cwd as pkg dir
    os.chdir(self.pkgdir)
    print("--^-^-- end setUp for %s --^-^--" % self.test_name)
def start_ccx(self):
    import multiprocessing
    import os
    import subprocess
    self.ccx_stdout = ""
    self.ccx_stderr = ""
    if self.inp_file_name != "" and self.ccx_binary_present:
        ont_backup = os.environ.get('OMP_NUM_THREADS')
        self.ccx_prefs = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Fem/Ccx")
        num_cpu_pref = self.ccx_prefs.GetInt("AnalysisNumCPUs", 1)  # number of CPUs specified in the preferences
        if not ont_backup:
            ont_backup = str(num_cpu_pref)
        # os.putenv() returns None, so its result cannot be handed to Popen(env=...);
        # build the child environment as a dict instead
        _env = os.environ.copy()
        if num_cpu_pref > 1:
            _env['OMP_NUM_THREADS'] = str(num_cpu_pref)  # if the user picked a number, use that instead
        else:
            _env['OMP_NUM_THREADS'] = str(multiprocessing.cpu_count())
        # change cwd because ccx may crash if the directory has no write permission;
        # there is also a limit on the length of file names, so jump to the document directory
        cwd = QtCore.QDir.currentPath()
        f = QtCore.QFileInfo(self.inp_file_name)
        QtCore.QDir.setCurrent(f.path())
        p = subprocess.Popen([self.ccx_binary, "-i ", f.baseName()],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             shell=False, env=_env)
        self.ccx_stdout, self.ccx_stderr = p.communicate()
        os.putenv('OMP_NUM_THREADS', ont_backup)
        QtCore.QDir.setCurrent(cwd)
        return p.returncode
    return -1
def ToolSpecificFlags(self):
    add_env = {"CHROME_ALLOCATOR": "WINHEAP"}
    for k, v in add_env.iteritems():
        logging.info("export %s=%s", k, v)
        os.putenv(k, v)

    proc = ThreadSanitizerBase.ToolSpecificFlags(self)
    # On PIN, ThreadSanitizer has its own suppression mechanism
    # and --log-file flag which work exactly as on Valgrind.
    suppression_count = 0
    for suppression_file in self._options.suppressions:
        if os.path.exists(suppression_file):
            suppression_count += 1
            suppression_file = common.NormalizeWindowsPath(suppression_file)
            proc += ["--suppressions=%s" % suppression_file]
    if not suppression_count:
        logging.warning("WARNING: NOT USING SUPPRESSIONS!")
    logfilename = self.log_dir + "/tsan.%p"
    proc += ["--log-file=" + common.NormalizeWindowsPath(logfilename)]
    # TODO(timurrrr): Add flags for the Valgrind trace-children analog when we
    # start running complex tests (e.g. UI) under TSan/Win.
    return proc
def start_syslogng(conf, keep_persist=False, verbose=False):
    global syslogng_pid

    os.system('rm -f test-*.log test-*.lgs test-*.db wildcard/* log-file')
    if not keep_persist:
        os.system('rm -f syslog-ng.persist')
    if not logstore_store_supported:
        conf = re.sub('logstore\(.*\);', '', conf)
    f = open('test.conf', 'w')
    f.write(conf)
    f.close()

    if verbose:
        verbose_opt = '-edv'
    else:
        verbose_opt = '-e'

    syslogng_pid = os.fork()
    if syslogng_pid == 0:
        os.putenv("RANDFILE", "rnd")
        module_path = ''
        for (root, dirs, files) in os.walk(os.path.abspath(os.path.join(os.environ['top_builddir'], 'modules'))):
            module_path = ':'.join(map(lambda x: root + '/' + x + '/.libs', dirs))
            break
        rc = os.execl('../../syslog-ng/syslog-ng', '../../syslog-ng/syslog-ng',
                      '-f', 'test.conf', '--fd-limit', '1024', '-F', verbose_opt,
                      '-p', 'syslog-ng.pid', '-R', 'syslog-ng.persist',
                      '--no-caps', '--enable-core', '--seed',
                      '--module-path', module_path)
        sys.exit(rc)
    time.sleep(5)
    print_user("Syslog-ng started")
    return True
def symbolic(filee, inputs, analysis, jalangi=util.DEFAULT_INSTALL):
    try:
        shutil.rmtree("jalangi_tmp")
    except:
        pass
    os.mkdir("jalangi_tmp")
    os.putenv("JALANGI_HOME", jalangi.get_home())
    os.chdir("jalangi_tmp")
    (instrumented_f, out) = instrument(os.path.join(os.pardir, filee), jalangi=jalangi)
    i = 0
    iters = 1
    while i <= iters and i <= inputs:
        try:
            # Ignore failures on first iteration
            os.remove("inputs.js")
        except:
            pass
        if not os.path.isfile("inputs.js"):
            util.mkempty("inputs.js")
        util.run_node_script_std(jalangi.symbolic_script(), analysis,
                                 os.path.join(os.path.dirname(os.path.join(os.pardir, filee) + ".js"), instrumented_f),
                                 jalangi=jalangi, savestderr=True)
        try:
            iters = int(util.head("jalangi_tail", 1)[0])
        except:
            pass
        i = i + 1
    if iters == inputs:
        print "{}.js passed".format(filee)
        with open("../jalangi_sym_test_results", 'a') as f:
            f.write("{}.js passed\n".format(filee))
    else:
        print "{}.js failed".format(filee)
        with open("../jalangi_sym_test_results", 'a') as f:
            f.write("{}.js failed\n".format(filee))
    print "Tests Generated = {}".format(iters)
def executeOtb(commands, progress, addToLog=True):
    loglines = []
    loglines.append(tr("OTB execution console output"))
    os.putenv("ITK_AUTOLOAD_PATH", otbLibPath())
    fused_command = "".join(['"%s" ' % re.sub(r'^"|"$', "", c) for c in commands])
    with subprocess.Popen(
        fused_command,
        shell=True,
        stdout=subprocess.PIPE,
        stdin=subprocess.DEVNULL,
        stderr=subprocess.STDOUT,
        universal_newlines=True,
    ) as proc:
        if isMac():
            # This trick avoids an uninterrupted-system-call exception if OTB is not installed
            time.sleep(1)
        for line in iter(proc.stdout.readline, ""):
            if "[*" in line:
                idx = line.find("[*")
                perc = int(line[idx - 4:idx - 2].strip(" "))
                if perc != 0:
                    progress.setPercentage(perc)
            else:
                loglines.append(line)
                progress.setConsoleInfo(line)

    if addToLog:
        ProcessingLog.addToLog(ProcessingLog.LOG_INFO, loglines)
    return loglines
def setProcessEnvironment(process):
    envvar_list = {
        "PATH": getGdalBinPath(),
        "PYTHONPATH": getGdalPymodPath()
    }

    sep = os.pathsep

    for name, val in envvar_list.iteritems():
        if val == None or val == "":
            continue

        envval = os.getenv(name)
        if envval == None or envval == "":
            envval = str(val)
        elif not QString( envval ).split( sep ).contains( val, Qt.CaseInsensitive ):
            envval += "%s%s" % (sep, str(val))
        else:
            envval = None

        if envval != None:
            os.putenv( name, envval )

        if False:  # not needed because os.putenv() has already updated the environment for new child processes
            env = QProcess.systemEnvironment()
            if env.contains( QRegExp( "^%s=(.*)" % name, Qt.CaseInsensitive ) ):
                env.replaceInStrings( QRegExp( "^%s=(.*)" % name, Qt.CaseInsensitive ),
                                      "%s=\\1%s%s" % (name, sep, gdalPath) )
            else:
                env << "%s=%s" % (name, val)
            process.setEnvironment( env )
def SimpleTest(self, module, name, heapcheck_test_args=None, cmd_args=None):
    '''Builds the command line and runs the specified test.

    Args:
      module: The module name (corresponds to the dir in src/ where the test
              data resides).
      name: The executable name.
      heapcheck_test_args: Additional command line args for the heap checker.
      cmd_args: Additional command line args for the test.
    '''
    cmd = self._DefaultCommand(module, name, heapcheck_test_args)
    supp = self.Suppressions()
    self._ReadGtestFilterFile(name, cmd)
    if cmd_args:
        cmd.extend(["--"])
        cmd.extend(cmd_args)

    # Sets LD_LIBRARY_PATH to the build folder so external libraries can be
    # loaded.
    if (os.getenv("LD_LIBRARY_PATH")):
        os.putenv("LD_LIBRARY_PATH", "%s:%s" % (os.getenv("LD_LIBRARY_PATH"),
                                                self._options.build_dir))
    else:
        os.putenv("LD_LIBRARY_PATH", self._options.build_dir)
    return heapcheck_test.RunTool(cmd, supp, module)
def build_bundle(args):
    version = args.version
    temp_dir = tempfile.mkdtemp()
    start_dir = os.getcwd()

    try:
        os.putenv('CONDA_ENVS_PATH', temp_dir)

        # 1. Install OpenMDAO to a temporary conda environment
        # 2. Grab all packages
        # 3. Make tar file
        create_env(
            'openmdao-bundle',
            ['openmdao=={version}'.format(version=version)],
            channel='http://conda.binstar.org/openmdao',
            yes=True
        )

        os.chdir('{envs_path}/.pkgs'.format(envs_path=temp_dir))
        pkgs = glob.glob('*.tar.bz2')

        # (a stray second tarfile.open() of the same archive was removed here)
        with tarfile.open('openmdao.tar', mode='w') as tar:
            for pkg in pkgs:
                tar.add(pkg, recursive=False)

        shutil.move(
            'openmdao.tar',
            '{start_dir}/openmdao.tar'.format(start_dir=start_dir)
        )
    finally:
        os.chdir(start_dir)
        os.unsetenv('CONDA_ENVS_PATH')
        shutil.rmtree(temp_dir)
def _get_interface_mapping(self):
    os.putenv("CLASSPATH", self.classpath)
    executionList = ['java', 'RunCliCommand']
    executionList.extend([self.mgmt_ip, self.username, self.password])
    executionList.extend(["show route terse"])
    command_exec_success = False
    while not command_exec_success:
        interfaces_dict = dict()
        try:
            op_list = []
            retVal = ""
            retVal = utils.execute(executionList, addl_env=self.addl_env)
            for line in retVal.splitlines(retVal.count('\n')):
                op_list.append(line)
            for i in range(len(op_list)):
                iface_dict = dict()
                if op_list[i] != '\n':
                    op_split = op_list[i].split(" ")
                    if op_split[-4].find('ge') >= 0:
                        iface_dict['network'] = op_split[1]
                        new_op = op_list[i + 1]
                        new_op_split = new_op.split(" ")
                        iface_dict['ip'] = new_op_split[1]
                        interfaces_dict[op_split[-4][1:]] = iface_dict
            command_exec_success = True
        except Exception as e:
            pass
    return interfaces_dict
def _get_vsrx_zone_list(self):
    os.putenv("CLASSPATH", self.classpath)
    executionList = ['java', 'RunConfigurationCommand']
    executionList.extend([self.mgmt_ip, self.username, self.password])
    command_exec_success = False
    zone_list = []
    try:
        while not command_exec_success:
            try:
                op_list = []
                executionList.extend(["show security zones"])
                retVal = ""
                retVal = utils.execute(executionList, addl_env=self.addl_env)
                for line in retVal.splitlines(retVal.count('\n')):
                    if re.search("security-zone", line):
                        line = line.rstrip()
                        op_list = re.split(" ", line)
                        zone = op_list[len(op_list) - 2]
                        zone_list.extend([zone])
                if "exception" in retVal:
                    command_exec_success = False
                else:
                    command_exec_success = True
            except Exception:
                pass
    finally:
        return zone_list
def set_driver():
    """Sets the video driver to draw directly to the framebuffer.

    This function is used when you are going to use video functionality
    with pygame but without X/Wayland. It should be run before
    initializing pygame.
    """
    logger.info("Setting display to use framebuffer")
    display_number = os.getenv("DISPLAY")
    if display_number:
        logger.info("X found: %r" % display_number)
    drivers = ["fbcon", "directfb", "svgalib"]
    found = False
    for driver in drivers:
        if not os.getenv("SDL_VIDEODRIVER"):
            os.putenv("SDL_VIDEODRIVER", driver)
        try:
            pygame.display.init()
            logger.info("Using %s driver" % driver)
        except pygame.error:
            print("Driver: {0} failed.".format(driver))
            continue
        found = True
        break
    if not found:
        raise Exception("No suitable video driver found!")
def render(self):
    """
    Render the URL into a pdf and set up the environment if required.
    """
    # set up the environment if it isn't set up yet
    if not os.getenv('DISPLAY'):
        os.system("Xvfb :0 -screen 0 %sx%sx%s & " % (
            self.screen_resolution[0],
            self.screen_resolution[1],
            self.color_depth
        ))
        os.putenv("DISPLAY", '127.0.0.1:0')

    # execute the command
    command = 'wkhtmltopdf %s "%s" "%s" >> /tmp/wkhtp.log' % (
        " ".join([cmd for cmd in self.params]),
        self.url,
        self.output_file
    )
    try:
        p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        retcode = p.returncode
        if retcode == 0:
            # call was successful
            return
        elif retcode < 0:
            raise Exception("Terminated by signal: ", -retcode)
        else:
            raise Exception(stderr)
    except OSError, exc:
        raise exc
def debug_server(self, shared_queue):
    self.read_configuration()
    uid = int(self.server_uid)
    if os.getuid() != uid:
        os.setresuid(uid, uid, uid)
    gid = int(self.server_gid)
    if os.getgid() != gid:
        os.setresgid(gid, gid, gid)

    for key in self.env:
        debug("Setting environment variable %s=%s" % (key, self.env[key]))
        os.putenv(key, self.env[key])

    if self.pre_command is not None:
        os.system(self.pre_command)

    crash = None
    for i in range(0, 3):
        try:
            crash = self.launch_debugger(self.timeout, self.command, "")
            break
        except:
            log("Exception: %s" % sys.exc_info()[1])
            continue

    if self.post_command is not None:
        os.system(self.post_command)

    if crash is not None:
        self.crash_info = crash
        shared_queue.put(crash)
        return True
    return False
def __init__( self, **args ):
    self.subinfo = subinfo()
    CMakePackageBase.__init__(self)
    # jom reports missing moc_translator.xxx
    self.subinfo.options.make.supportsMultijob = False
    # add support for another location based on the python path
    localPythonPath = os.path.join(self.rootdir, 'emerge', 'python')
    haveLocalPython = os.path.exists(localPythonPath)

    if compiler.isMSVC2008():
        specName = "win32-msvc2008"
    elif compiler.isMSVC2010():
        specName = "win32-msvc2010"
    elif compiler.isMinGW():
        specName = "win32-g++"
    else:
        utils.die("compiler %s not supported for PyQt4" % compiler.COMPILER)

    if haveLocalPython:
        specDir = self.mergeDestinationDir()
    else:
        specDir = self.rootdir
    os.putenv("QMAKESPEC", os.path.join(specDir, "mkspecs", specName))

    if haveLocalPython:
        self.subinfo.options.merge.destinationPath = "emerge/python"
    self.subinfo.options.configure.defines = " --confirm-license --verbose"
    if self.buildType() == "Debug":
        self.subinfo.options.configure.defines += " -u"
def __init__(self, amsn_core):
    import os
    os.putenv("QT_NO_GLIB", "1")  # FIXME: Temporary workaround for a segfault
                                  # caused by the GLib event loop integration
    self.app = QApplication(sys.argv)
    self.gmainloop = gobject.MainLoop()
    self.gcontext = self.gmainloop.get_context()
def run(command, env_vars=None):
    if env_vars:
        for key, value in env_vars.items():
            os.putenv(key, value)
    print('*** Running: %s%s%s' % (COLOR_OK, command, COLOR_END))
    if os.system(command):
        raise RuntimeError(' FAILED: %s' % (command))
def __init__(self):
    "Initializes a new pygame screen using the framebuffer"
    # Based on "Python GUI in Linux frame buffer"
    # http://www.karoltomala.com/blog/?p=679
    disp_no = os.getenv("DISPLAY")
    if disp_no:
        print "I'm running under X display = {0}".format(disp_no)

    os.putenv('SDL_FBDEV', '/dev/fb1')  # Select frame buffer driver

    # Make sure that SDL_VIDEODRIVER is set
    driver = 'fbcon'
    if not os.getenv('SDL_VIDEODRIVER'):
        os.putenv('SDL_VIDEODRIVER', driver)
    try:
        pygame.display.init()
    except pygame.error:
        print 'Driver: {0} failed.'.format(driver)
        exit(0)

    size = (pygame.display.Info().current_w, pygame.display.Info().current_h)
    print "Framebuffer size: %d x %d" % (size[0], size[1])
    self.screen = pygame.display.set_mode(size, pygame.FULLSCREEN)
    # Clear the screen to start
    self.screen.fill((0, 0, 0))
    # Initialise font support
    pygame.font.init()
    # Render the screen
    pygame.display.update()
def test_changing_env(self):
    os.putenv('ENV', 'test')
    run_migrator()
    result = session.execute(
        'SELECT * FROM migrations_test.schema_migrations LIMIT 1'
    )
    self.assertEquals(result[0].version, 2)
def load_modules(self, *k, **kw):
    module_string = ''
    try:
        for module in kw['modules']:
            module_string += module + ' '
    except KeyError:
        self.fatal('You must give modules to function check_modules like this: check_module(modules=[a,b,c])')
    #self.start_msg('Loading modules')
    p = subprocess.Popen('source /usr/local/Modules/current/init/bash && module load ' + module_string + ' && export -p',
                         shell=True, stdout=subprocess.PIPE)
    p.wait()
    if p.returncode == 0:
        for key in os.environ.iterkeys():
            os.unsetenv(key)
        for line in p.stdout:
            m = re.search('(\w+)=(".+")$', line)
            if (m):
                os.putenv(m.group(1), m.group(2))
        #self.end_msg(module_string)
    else:
        self.fatal('Loading modules did not work')
def get_cuda_kernel_prototypes(binary):
    os.putenv("LD_PRELOAD", "")
    output = subprocess.Popen(GDB_COMMAND + [binary] + LIST_KERNELS + QUIT,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE).communicate()[0]
    functions = " ".join([f for f in str(output).split(SPLITTER)[1].split("\n")
                          if not f.startswith("All functions") and
                          not f.startswith("File ")
                          and f])
    #full_fct_name = functions.split("\\nvoid ")[7].split("(")[0]
    kern_list = [e.split("Pi")[0]
                  .split("PK")[0]
                  .split("_implii")[0]
                  .split("ii")[0]
                 for e in re.findall(r'Z\d+(\S+(?!(?:Pi)))(?:Pf|Pi|Pc)', functions)]
    kern_list = sorted([e if not e.endswith("i") else e[:-1] for e in kern_list])
    kern_list = [e.replace("ILi", "<").replace("EEv", ">") for e in kern_list]
    print_kernels = []
    for kern in kern_list:
        print_kernels += ["-ex", "print {}".format(kern)]
    output = subprocess.Popen(GDB_COMMAND + [binary] + print_kernels + QUIT,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE).communicate()[0]
    locations = [e.split(" = ")[1].strip()
                 for e in str(output).split(SPLITTER)[1].replace("\\n", " ").split("$")
                 if " = " in e]
    return locations
def __prepare_env(self, env_string):
    env_dict = self.__get_dict(env_string)
    if "TERM" not in env_dict:
        env_dict["TERM"] = "xterm"
    env_dict["PATH"] = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
    for key in ["DISPLAY", "TERM", "PATH"]:
        os.putenv(key, env_dict[key])
def setup_path():
    """Set the process PATH from the holland.path config value"""
    if HOLLANDCFG.lookup("holland.path"):
        # update both the C-level environment (for child processes) and
        # os.environ (for lookups within this process)
        os.putenv("PATH", HOLLANDCFG.lookup("holland.path"))
        os.environ["PATH"] = HOLLANDCFG.lookup("holland.path")
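The two assignments above are not redundant: os.putenv() writes only to the C-level environment that child processes inherit, while os.getenv() and os.environ read a snapshot taken at interpreter startup that putenv() does not touch. A minimal sketch of the asymmetry (standard CPython behavior; MY_VAR is a made-up name):

import os

os.putenv('MY_VAR', '1')        # child processes will see it...
print(os.getenv('MY_VAR'))      # ...but this prints None: os.environ was not updated

os.environ['MY_VAR'] = '1'      # updates os.environ and calls putenv() internally
print(os.getenv('MY_VAR'))      # prints '1'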
def __init__(self):
    "Initializes a new pygame screen using the framebuffer"
    disp_no = os.getenv("DISPLAY")
    if disp_no:
        print "I'm running under X display = {0}".format(disp_no)

    os.putenv('SDL_FBDEV', device)
    drivers = ['fbcon', 'directfb', 'svgalib']
    found = False
    for driver in drivers:
        # Make sure that SDL_VIDEODRIVER is set
        if not os.getenv('SDL_VIDEODRIVER'):
            os.putenv('SDL_VIDEODRIVER', driver)
        try:
            pygame.display.init()
        except pygame.error:
            print 'Driver: {0} failed.'.format(driver)
            continue
        found = True
        break

    if not found:
        raise Exception('No suitable video driver found!')

    pygame.mixer.init()
    size = (pygame.display.Info().current_w, pygame.display.Info().current_h)
    print "Framebuffer size: %d x %d" % (size[0], size[1])
    self.screen = pygame.display.set_mode(size, pygame.FULLSCREEN)
    self.screen.fill((0, 0, 0))
    pygame.font.init()
    pygame.display.update()
def _do_config():
    """
    Configure the runtime environment to recognize the docker daemon.
    This must be called before any Docker commands are run.

    NOTE: this assumes an OSX or Windows install because it uses
    docker-machine. I have no idea how to do it in a linux environment. :(

    :return None:
    """
    command = 'docker-machine env'
    with Popen(command, shell=True, stdout=PIPE, stderr=PIPE, universal_newlines=True) as proc:
        env_values, std_error = proc.communicate()
        for line in env_values.split('\n'):
            line = line.strip()
            if line == '' or line.startswith('#'):
                continue
            # each line is of the form "export KEY=VALUE", so isolate the key/value
            pair = line.split()[1]
            [key, value] = pair.split('=')
            # in the shell we surround the value with quotes; for os.putenv we don't
            value = value.strip('"')
            os.putenv(key, value)
        # This has to succeed or we cannot continue
        if proc.returncode != 0:
            raise CalledProcessError(proc.returncode, command)
def drawitall():
    os.putenv('SDL_FBDEV', '/dev/fb1')
    pygame.init()
    background.fill(bcolor)
    apron()
    circles(1, green)
    circles(2, green)
    circles(3, green)
    headers(zone1, 1)
    headers(zone2, 2)
    headers(zone3, 3)
    footers(1)
    footers(2)
    footers(3)
    title()
    complications(1)
    complications(2)
    complications(3)
    number(1, 0)
    number(2, 0)
    number(3, 0)
    compnumber(3, 0)
    compnumber(2, 0)
    compnumber(1, '04:00')
    comptext(1, c1t)
    comptext(2, c2t)
    comptext(3, c3t)
    lcd.blit(background, (0, 0))
    pygame.display.flip()
def doUploadToStaging(buildmode, workingDir, buildVersion, log):
    print "doUploadToStaging..."

    if buildmode == "debug":
        dbgStr = "DEBUG=1"
    else:
        dbgStr = ""

    buildRoot = os.path.join(workingDir, 'external')
    print 'Setting BUILD_ROOT=', buildRoot
    log.write('Setting BUILD_ROOT=' + buildRoot + '\n')
    os.putenv('BUILD_ROOT', buildRoot)
    os.chdir(buildRoot)

    uploadDir = os.path.join(buildRoot, buildVersion)
    if not os.path.exists(uploadDir):
        os.mkdir(uploadDir)

    try:
        upload = ' uploadworld UPLOAD=' + uploadDir
        print "Doing make " + dbgStr + upload
        log.write("Doing make " + dbgStr + upload + "\n")
        outputList = hardhatutil.executeCommandReturnOutput(
            [buildenv['make'], dbgStr, upload])
        hardhatutil.dumpOutputList(outputList, log)
        log.write(separator)
    except hardhatutil.ExternalCommandErrorWithOutputList, e:
        print "upload error"
        log.write("***Error during upload***\n")
        log.write(separator)
        log.write("Build log:" + "\n")
        hardhatutil.dumpOutputList(e.outputList, log)
        log.write(separator)
        raise e
def _init_posix():
    """Initialize the module as appropriate for POSIX systems."""
    g = {}
    # load the installed Makefile:
    try:
        filename = get_makefile_filename()
        parse_makefile(filename, g)
    except IOError as msg:
        my_msg = "invalid Python installation: unable to open %s" % filename
        if hasattr(msg, "strerror"):
            my_msg = my_msg + " (%s)" % msg.strerror
        raise DistutilsPlatformError(my_msg)

    # load the installed pyconfig.h:
    try:
        filename = get_config_h_filename()
        with open(filename) as file:
            parse_config_h(file, g)
    except IOError as msg:
        my_msg = "invalid Python installation: unable to open %s" % filename
        if hasattr(msg, "strerror"):
            my_msg = my_msg + " (%s)" % msg.strerror
        raise DistutilsPlatformError(my_msg)

    # On MacOSX we need to check the setting of the environment variable
    # MACOSX_DEPLOYMENT_TARGET: configure bases some choices on it so
    # it needs to be compatible.
    # If it isn't set we set it to the configure-time value
    if sys.platform == 'darwin' and 'MACOSX_DEPLOYMENT_TARGET' in g:
        cfg_target = g['MACOSX_DEPLOYMENT_TARGET']
        cur_target = os.getenv('MACOSX_DEPLOYMENT_TARGET', '')
        if cur_target == '':
            cur_target = cfg_target
            os.putenv('MACOSX_DEPLOYMENT_TARGET', cfg_target)
        elif ([int(x) for x in cfg_target.split('.')] >
              [int(x) for x in cur_target.split('.')]):
            my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: now "%s" but "%s" during configure'
                      % (cur_target, cfg_target))
            raise DistutilsPlatformError(my_msg)

    # On AIX, there are wrong paths to the linker scripts in the Makefile
    # -- these paths are relative to the Python source, but when installed
    # the scripts are in another directory.
    if python_build:
        g['LDSHARED'] = g['BLDSHARED']

    elif get_python_version() < '2.1':
        # The following two branches are for 1.5.2 compatibility.
        if sys.platform == 'aix4':          # what about AIX 3.x ?
            # Linker script is in the config directory, not in Modules as the
            # Makefile says.
            python_lib = get_python_lib(standard_lib=1)
            ld_so_aix = os.path.join(python_lib, 'config', 'ld_so_aix')
            python_exp = os.path.join(python_lib, 'config', 'python.exp')
            g['LDSHARED'] = "%s %s -bI:%s" % (ld_so_aix, g['CC'], python_exp)

    global _config_vars
    _config_vars = g
def buildQtWebKit(self):
    if self.options.skip_qtwebkit:
        print("Skipping build of Qt WebKit")
        return

    if self.options.git_clean_qtwebkit:
        self.gitClean("src/qt/qtwebkit")

    os.putenv("SQLITE3SRCDIR", os.path.abspath("src/qt/qtbase/src/3rdparty/sqlite"))

    print("configuring Qt WebKit, please wait...")
    configureOptions = [
        # disable some webkit features we do not need
        "WEBKIT_CONFIG-=build_webkit2",
        "WEBKIT_CONFIG-=netscape_plugin_api",
        "WEBKIT_CONFIG-=use_gstreamer",
        "WEBKIT_CONFIG-=use_gstreamer010",
        "WEBKIT_CONFIG-=use_native_fullscreen_video",
        "WEBKIT_CONFIG-=video",
        "WEBKIT_CONFIG-=web_audio",
    ]
    if self.options.webkit_qmake_args:
        configureOptions.extend(self.options.webkit_qmake_args)
    if self.qmake("src/qt/qtwebkit", configureOptions) != 0:
        raise RuntimeError("Configuration of Qt WebKit failed.")

    print("building Qt WebKit, please wait...")
    if self.make("src/qt/qtwebkit") != 0:
        raise RuntimeError("Building Qt WebKit failed.")
def _start_omc(self):
    self._server = None
    self._omc_command = None
    try:
        self.omhome = os.environ['OPENMODELICAHOME']
        # add OPENMODELICAHOME\lib to PYTHONPATH so python can load omniORB libraries
        sys.path.append(os.path.join(self.omhome, 'lib'))
        sys.path.append(os.path.join(self.omhome, 'lib', 'python'))
        # add OPENMODELICAHOME\bin to PATH so python can find the omniORB binaries
        pathVar = os.getenv('PATH')
        pathVar += ';'
        pathVar += os.path.join(self.omhome, 'bin')
        os.putenv('PATH', pathVar)
        self._set_omc_corba_command(os.path.join(self.omhome, 'bin', 'omc'))
        self._start_server()
    except:
        # FIXME: what is this case? are we looking at platform specifics? or different versions of OpenModelica?
        try:
            import OMConfig
            PREFIX = OMConfig.DEFAULT_OPENMODELICAHOME
            self.omhome = os.path.join(PREFIX)
            self._set_omc_corba_command(os.path.join(self.omhome, 'bin', 'omc'))
            self._start_server()
        except:
            # FIXME: what is this case? are we looking at platform specifics? or different versions of OpenModelica?
            try:
                self._set_omc_corba_command('/opt/local/bin/omc')
                self._start_server()
            except Exception as ex:
                self.logger.error("The OpenModelica compiler is missing from the system path, please install it")
                raise ex
def q_learning(file_name=None, plot=False, gap_division=3, gamma=0.75,
               epsilon=0.9, batch_size=128, reward_weight_decision=True,
               buffer_size=5000):
    os.putenv('SDL_VIDEODRIVER', 'fbcon')
    os.environ["SDL_VIDEODRIVER"] = "dummy"
    game = FlappyBird(width=game_width, height=game_height, pipe_gap=game_pipe_gap)
    p = PLE(game, frame_skip=6)
    p.init()

    last_state = None
    last_action = 0
    last_actions_q_values = [0, 0]
    last_score = 0
    buffer = []
    episode = 0

    network = Network(batch_size, gamma, epsilon, gap_division)
    if file_name is not None:
        network.load(file_name, rename=True)
    else:
        leaky_option_hidden_layers, leaky_option_last_layer = False, False
        activation_hidden_layers = input(
            "Enter the activation function for the hidden layers (leave empty for default activation (relu)) \n"
        )
        activation_hidden_layers = "relu" if activation_hidden_layers == "" else activation_hidden_layers
        if activation_hidden_layers == "leaky relu":
            alpha_relu = input(
                "Enter alpha value for relu activation (0.3 by default)\n")
            if alpha_relu == "0.3" or alpha_relu == "":
                activation_hidden_layers = LeakyReLU(alpha=0.3)
            else:
                activation_hidden_layers = LeakyReLU(alpha=float(alpha_relu))
            leaky_option_hidden_layers = True
        activation_last_layer = input(
            "Enter the activation function for the last layer (leave empty for default activation (linear)) \n"
        )
        activation_last_layer = "linear" if activation_last_layer == "" else activation_last_layer
        if activation_last_layer == "leaky relu":
            alpha_relu = input(
                "Enter alpha value for relu activation (0.3 by default)\n")
            if alpha_relu == "0.3" or alpha_relu == "":
                activation_last_layer = LeakyReLU(alpha=0.3)
            else:
                activation_last_layer = LeakyReLU(alpha=float(alpha_relu))
            leaky_option_last_layer = True
        weight_initializer = input(
            "Enter weight initializer (leave empty for default value (glorot_uniform)) \n"
        )
        weight_initializer = "glorot_uniform" if weight_initializer == "" else weight_initializer
        bias_initializer = input(
            "Enter bias initializer (leave empty for default value (glorot_uniform)) \n"
        )
        bias_initializer = "glorot_uniform" if bias_initializer == "" else bias_initializer
        loss_func = input(
            "Enter loss function (leave empty for default value (binary_crossentropy)) \n"
        )
        loss_func = "binary_crossentropy" if loss_func == "" else loss_func
        optimizer = input(
            "Enter the optimizer for neural network (leave empty for default value (Adadelta)) or (Adadelta/RMSprop/SGD/Nadam) \n"
        )
        optimizer = "Adadelta" if optimizer == "" else optimizer
        optimizer_parameters = set_optimizer_parameters(optimizer)
        network.create_layers(
            activation_hidden_layers=activation_hidden_layers,
            activation_last_layer=activation_last_layer,
            weight_initializer=weight_initializer,
            bias_initializer=bias_initializer,
            loss_function=loss_func,
            optimizer=optimizer,
            optimizer_parameters=optimizer_parameters,
            leaky_hidden_layers=leaky_option_hidden_layers,
            leaky_last_layer=leaky_option_last_layer)

    while 1:
        if p.game_over():
            # restart the game
            p.reset_game()
            # count episodes
            episode += 1
            if episode % 1000 == 0:
                network.save_file()
            # update plot
            print(
                f'\n episode={episode}, epsilon={epsilon}, buffer_size={len(buffer)}, score={last_score}'
            )
            if plot is True:
                plt.scatter(episode, last_score)
                plt.pause(0.001)
                print(f'\n episode={episode}, score={last_score}')
            # adding the last entry correctly
            label = last_actions_q_values
            label[last_action] = -1000
            if len(buffer) < buffer_size:
                buffer += [(last_state, label)]
            else:
                buffer = buffer[1:] + [(last_state, label)]
            # reset all
            last_state = None
            last_action = 0
            last_actions_q_values = [0, 0]
            last_score = 0

        # look at the current state
        current_state = p.getGameState()
        current_score = p.score()
        # compute the actions' Q values
        actions_q_values = network.Q(current_state).tolist()
        # Compute the label for the last_state
        reward = get_reward(state=current_state,
                            gap_division=gap_division,
                            reward_weight_decision=reward_weight_decision)
        max_q = max(actions_q_values)
        label = last_actions_q_values
        if current_score - last_score > 0:
            label[last_action] = (current_score - last_score) * 1000
        else:
            label[last_action] = reward + gamma * max_q

        # not taking the first state into consideration
        if last_state is not None:
            # Update buffers
            if len(buffer) < buffer_size:
                buffer += [(last_state, label)]
            else:
                buffer = buffer[1:] + [(last_state, label)]
            # train
            if len(buffer) >= batch_size:
                sample = random.sample(buffer, batch_size)
                network.train(sample)

        # choose the optimal action with a chance of 1 - epsilon
        actions_indexes = np.arange(len(actions_q_values))
        optimal_action_to_take = np.argmax(actions_q_values)
        random_action = np.random.choice(actions_indexes)
        if np.random.uniform() < epsilon:
            action = random_action
        else:
            action = optimal_action_to_take
        # act accordingly
        p.act(None if action == 0 else 119)
        # update epsilon
        if epsilon > 0.1:
            epsilon = epsilon - 0.00000075

        # remember everything needed from the current state
        last_action = action
        last_state = current_state
        last_actions_q_values = actions_q_values
        last_score = current_score

        # Log
        sys.stdout.write(
            f'\rBottom: {game_height - current_state["next_pipe_bottom_y"]}, '
            f'Top: {game_height - current_state["next_pipe_top_y"]}, '
            f'Bird: {game_height - current_state["player_y"]}, Reward: {reward}')
        sys.stdout.flush()
def main():
    # Extract settings from git repo
    repo = git_config_get('githubmerge.repository')
    host = git_config_get('githubmerge.host', '*****@*****.**')
    opt_branch = git_config_get('githubmerge.branch', None)
    testcmd = git_config_get('githubmerge.testcmd')
    signingkey = git_config_get('user.signingkey')
    if repo is None:
        print("ERROR: No repository configured. Use this command to set:", file=stderr)
        print("git config githubmerge.repository <owner>/<repo>", file=stderr)
        exit(1)
    if signingkey is None:
        print("ERROR: No GPG signing key set. Set one using:", file=stderr)
        print("git config --global user.signingkey <key>", file=stderr)
        exit(1)

    host_repo = host + ":" + repo  # shortcut for push/pull target

    # Extract settings from command line
    args = parse_arguments()
    pull = str(args.pull[0])

    # Receive pull information from github
    info = retrieve_pr_info(repo, pull)
    if info is None:
        exit(1)
    title = info['title']
    # precedence order for destination branch argument:
    #   - command line argument
    #   - githubmerge.branch setting
    #   - base branch for pull (as retrieved from github)
    #   - 'master'
    branch = args.branch or opt_branch or info['base']['ref'] or 'master'

    # Initialize source branches
    head_branch = 'pull/' + pull + '/head'
    base_branch = 'pull/' + pull + '/base'
    merge_branch = 'pull/' + pull + '/merge'
    local_merge_branch = 'pull/' + pull + '/local-merge'

    devnull = open(os.devnull, 'w')
    try:
        subprocess.check_call([GIT, 'checkout', '-q', branch])
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot check out branch %s." % (branch), file=stderr)
        exit(3)
    try:
        subprocess.check_call([
            GIT, 'fetch', '-q', host_repo,
            '+refs/pull/' + pull + '/*:refs/heads/pull/' + pull + '/*'
        ])
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot find pull request #%s on %s." % (pull, host_repo), file=stderr)
        exit(3)
    try:
        subprocess.check_call(
            [GIT, 'log', '-q', '-1', 'refs/heads/' + head_branch],
            stdout=devnull, stderr=stdout)
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot find head of pull request #%s on %s." % (pull, host_repo), file=stderr)
        exit(3)
    try:
        subprocess.check_call(
            [GIT, 'log', '-q', '-1', 'refs/heads/' + merge_branch],
            stdout=devnull, stderr=stdout)
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot find merge of pull request #%s on %s." % (pull, host_repo), file=stderr)
        exit(3)
    try:
        subprocess.check_call([
            GIT, 'fetch', '-q', host_repo,
            '+refs/heads/' + branch + ':refs/heads/' + base_branch
        ])
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot find branch %s on %s." % (branch, host_repo), file=stderr)
        exit(3)
    subprocess.check_call([GIT, 'checkout', '-q', base_branch])
    subprocess.call([GIT, 'branch', '-q', '-D', local_merge_branch], stderr=devnull)
    subprocess.check_call([GIT, 'checkout', '-q', '-b', local_merge_branch])

    try:
        # Create unsigned merge commit.
        if title:
            firstline = 'Merge #%s: %s' % (pull, title)
        else:
            firstline = 'Merge #%s' % (pull, )
        message = firstline + '\n\n'
        message += subprocess.check_output([
            GIT, 'log', '--no-merges', '--topo-order',
            '--pretty=format:%h %s (%an)', base_branch + '..' + head_branch
        ]).decode('utf-8')
        try:
            subprocess.check_call([
                GIT, 'merge', '-q', '--commit', '--no-edit', '--no-ff', '-m',
                message.encode('utf-8'), head_branch
            ])
        except subprocess.CalledProcessError as e:
            print("ERROR: Cannot be merged cleanly.", file=stderr)
            subprocess.check_call([GIT, 'merge', '--abort'])
            exit(4)
        logmsg = subprocess.check_output(
            [GIT, 'log', '--pretty=format:%s', '-n', '1']).decode('utf-8')
        if logmsg.rstrip() != firstline.rstrip():
            print("ERROR: Creating merge failed (already merged?).", file=stderr)
            exit(4)

        print('%s#%s%s %s %sinto %s%s' % (ATTR_RESET + ATTR_PR, pull, ATTR_RESET,
                                          title, ATTR_RESET + ATTR_PR, branch,
                                          ATTR_RESET))
        subprocess.check_call([
            GIT, 'log', '--graph', '--topo-order',
            '--pretty=format:' + COMMIT_FORMAT, base_branch + '..' + head_branch
        ])
        print()

        # Run test command if configured.
        if testcmd:
            # Go up to the repository's root.
            toplevel = subprocess.check_output(
                [GIT, 'rev-parse', '--show-toplevel']).strip()
            os.chdir(toplevel)
            if subprocess.call(testcmd, shell=True):
                print("ERROR: Running %s failed." % testcmd, file=stderr)
                exit(5)

            # Show the created merge.
            diff = subprocess.check_output(
                [GIT, 'diff', merge_branch + '..' + local_merge_branch])
            subprocess.check_call(
                [GIT, 'diff', base_branch + '..' + local_merge_branch])
            if diff:
                print("WARNING: merge differs from github!", file=stderr)
                reply = ask_prompt("Type 'ignore' to continue.")
                if reply.lower() == 'ignore':
                    print("Difference with github ignored.", file=stderr)
                else:
                    exit(6)
            reply = ask_prompt("Press 'd' to accept the diff.")
            if reply.lower() == 'd':
                print("Diff accepted.", file=stderr)
            else:
                print("ERROR: Diff rejected.", file=stderr)
                exit(6)
        else:
            # Verify the result manually.
            print("Dropping you on a shell so you can try building/testing the merged source.", file=stderr)
            print("Run 'git diff HEAD~' to show the changes being merged.", file=stderr)
            print("Type 'exit' when done.", file=stderr)
            if os.path.isfile('/etc/debian_version'):  # Show pull number on Debian default prompt
                os.putenv('debian_chroot', pull)
            subprocess.call([BASH, '-i'])
            reply = ask_prompt("Type 'm' to accept the merge.")
            if reply.lower() == 'm':
                print("Merge accepted.", file=stderr)
            else:
                print("ERROR: Merge rejected.", file=stderr)
                exit(7)

        # Sign the merge commit.
        reply = ask_prompt("Type 's' to sign off on the merge.")
        if reply == 's':
            try:
                subprocess.check_call([
                    GIT, 'commit', '-q', '--gpg-sign', '--amend', '--no-edit'
                ])
            except subprocess.CalledProcessError as e:
                print("Error signing, exiting.", file=stderr)
                exit(1)
        else:
            print("Not signing off on merge, exiting.", file=stderr)
            exit(1)

        # Put the result in branch.
        subprocess.check_call([GIT, 'checkout', '-q', branch])
        subprocess.check_call(
            [GIT, 'reset', '-q', '--hard', local_merge_branch])
    finally:
        # Clean up temporary branches.
        subprocess.call([GIT, 'checkout', '-q', branch])
        subprocess.call([GIT, 'branch', '-q', '-D', head_branch], stderr=devnull)
        subprocess.call([GIT, 'branch', '-q', '-D', base_branch], stderr=devnull)
        subprocess.call([GIT, 'branch', '-q', '-D', merge_branch], stderr=devnull)
        subprocess.call([GIT, 'branch', '-q', '-D', local_merge_branch], stderr=devnull)

    # Push the result.
    reply = ask_prompt("Type 'push' to push the result to %s, branch %s." % (host_repo, branch))
    if reply.lower() == 'push':
        subprocess.check_call([GIT, 'push', host_repo, 'refs/heads/' + branch])
def cronExecution():
    # load configuration:
    global mainConfig
    mainConfig = GlcuConfig('cron')
    if (int(mainConfig.getMainConfig('verbosity')) > 2):
        # debugging only
        print("cronExecution: ", sys.argv)

    # initialize mail object:
    mail = CronOutput()

    # only run on the specified day
    myToday = datetime.date.today()
    myWeekDay = '%d' % myToday.isoweekday()
    if ((int(mainConfig.getMainConfig('cronday')) != int(myWeekDay)) and
            (int(mainConfig.getMainConfig('cronday')) != 0)):
        if (mainConfig.getMainConfig('rerunconfig')):
            os.execv('/etc/cron.daily/glcu', [
                '/etc/cron.daily/glcu',
                mainConfig.getMainConfig('rerunconfig')
            ])
        else:
            sys.exit(0)

    # Check if the glcu config file was updated. Otherwise send mail and write a check file
    if (int(mainConfig.getMainConfig('cronday')) == 8):
        if os.path.isfile('/tmp/glcuConfigNotUpdated'):
            sys.exit(1)
        else:
            mail.earlyErrorMail(
                'Update config file!',
                "Important message from glcu:\n============================\n\n Edit the glcu config file '/etc/glcu.conf' to suit your needs!\n Otherwise glcu won't work."
            )
            nO = open('/tmp/glcuConfigNotUpdated', 'w')
            nO.write("edit '/etc/glcu' and delete this file!")
            nO.close()

    # Check if there is already a glcu instance running
    runCheckCommand = '/bin/ps aux | grep glcu |grep "/etc/cron" | grep -v ' + str(
        os.getpid())
    runCheckExitStatus = os.system(runCheckCommand) / 256
    if (int(runCheckExitStatus) == 0):
        exception(
            'There is already an instance of glcu running\n\n*** ABORTING')

    # 1. emerge sync
    if (mainConfig.getMainConfig('sync')):
        sync = ShellExecution('emerge --sync')
        if (sync.getExitStatus() != 0):
            mail.earlyErrorMail(
                'emerge --sync failed',
                'Error log for emerge --sync:\n\n' + sync.getOutput())

    # 2. fixpackages (if wanted and needed)
    if (mainConfig.getMainConfig('fixpackages')):
        if (re.search(
                "Skipping packages. Run 'fixpackages' or set it in FEATURES",
                sync.getOutput())):
            fixpackages = ShellExecution('/usr/sbin/fixpackages')
            if (fixpackages.getExitStatus() != 0):
                mail.earlyErrorMail(
                    'fixpackages failed',
                    'Error log for fixpackages:\n\n' + fixpackages.getOutput())

    # 3.a) run eupdatedb
    if (mainConfig.getMainConfig('eupdatedb')):
        eupdatedb = ShellExecution('/usr/bin/eupdatedb')
        if (eupdatedb.getExitStatus() != 0):
            mail.earlyErrorMail(
                'eupdatedb failed',
                'Error log for eupdatedb:\n\n' + eupdatedb.getOutput())

    # 3.b) run eix-update
    if (mainConfig.getMainConfig('updateix')):
        eupdatedb = ShellExecution('/usr/bin/eix-update')
        if (eupdatedb.getExitStatus() != 0):
            mail.earlyErrorMail(
                'eix-update failed',
                'Error log for eix-update:\n\n' + eupdatedb.getOutput())

    # 4. check for security updates (if wanted)
    global blockList
    blockList = []
    updateList = []
    secPackages = []
    if (mainConfig.getMainConfig('security')):
        checkSecurity = ShellExecution('glsa-check --list --nocolor')
        if (checkSecurity.getExitStatus() != 0):
            mail.earlyErrorMail('Problem during glsa-check --list',
                                checkSecurity.getOutput())
        secPackages = re.findall(" \[N\].*?\(\s(.*?)\s\)",
                                 checkSecurity.getOutput())
        if (int(mainConfig.getMainConfig('verbosity')) > 2):
            # debugging only
            print('secPackages: ', secPackages)
        for secPackage in secPackages:
            if (re.search("\s", secPackage)):
                severalPackages = re.split("\s", secPackage)
                secPackages.remove(secPackage)
                for singlePackage in severalPackages:
                    if (singlePackage != '...'):
                        secPackages.append(singlePackage)
                if (int(mainConfig.getMainConfig('verbosity')) > 2):
                    # debugging only
                    print('secPackage: ', secPackage)
                    print('severalPackages: ', severalPackages)
        secPackages.reverse()
        if (int(mainConfig.getMainConfig('verbosity')) > 2):
            # debugging only
            print("glsa-package count: ", len(secPackages))
            print("glsa-package list: ", secPackages)

    packageList = set(secPackages)

    # Only prebuild security packages if they are installed:
    newSecPList = packageList.copy()
    for package in newSecPList:
        securityCheckCommand = 'emerge --pretend ' + package + ' |grep ' + package
        securityCheck = ShellExecution(securityCheckCommand)
        if re.findall("\[ebuild\s*N", securityCheck.getOutput()):
            packageList.discard(package)

    # 5. check for system/world
    if ((mainConfig.getMainConfig('update') == 'system') or
            (mainConfig.getMainConfig('update') == 'world')):
        os.putenv('NOCOLOR', 'true')
        emergePretendCommand = 'emerge --pretend ' + mainConfig.getMainConfig(
            'sysworldoptions') + ' ' + mainConfig.getMainConfig('update')
        emergePretend = ShellExecution(emergePretendCommand)
        if (emergePretend.getExitStatus() != 0):
            mail.earlyErrorMail('Problem during ' + emergePretendCommand,
                                emergePretend.getOutput())
        updateList = re.findall("\[ebuild.*?\]\s(.*?)\s.*?\n",
                                emergePretend.getOutput())
        blockList = re.findall("\[blocks.*?\]\s(.*?)\s.*?\n",
                               emergePretend.getOutput())
        updateList.reverse()
        for block in blockList:
            mail.addBlockPackage(block)
    elif (mainConfig.getMainConfig('update') == 'security'):
        pass
    else:
        mail.earlyErrorMail(
            'unsupported value for update-option in config file',
            '*** ERROR***\n\n\nglcu found an unsupported option in /etc/glcu.conf:\n\n update: '
            + mainConfig.getMainConfig('update') +
            '\n\nThis needs to be fixed for glcu to work!')

    # check for duplicates (with and without version numbers):
    cpPackList = packageList.copy()
    for package in cpPackList:
        for upackage in updateList:
            rePackage = re.sub('\+', '\+', package)
            if re.match(rePackage, upackage):
                packageList.discard(package)

    # merge the set from glsa-check and the list from emerge system/world
    packageList.update(updateList)
    if (int(mainConfig.getMainConfig('verbosity')) > 2):
        # debugging only
        print('Number of Packages:', len(packageList))
        print('Packages:', packageList)
        print('Blocking Packages:', blockList)

    # 6. prebuild all needed packages (security + system/world)
    # check for the package directory
    pkgDir = '/usr/portage/packages'
    packageDir = ShellExecution('emerge --info|grep PKGDIR')
    if (packageDir.getExitStatus() == 0):
        infoDir = re.findall("\"(.+)\"", packageDir.getOutput())
    else:
        exception('Problem during execution of: emerge --info|grep PKGDIR')
    pkgDir = infoDir[0] + '/All'

    # get prebuilt packages in PKGDIR:
    prebuiltPackages = []
    if (os.path.isdir(pkgDir) == 0):
        os.makedirs(pkgDir, 0o755)
    prebuiltDirList = os.listdir(pkgDir)
    for dirPackage in prebuiltDirList:
        prebuiltPackage = dirPackage.replace('.tbz2', '')
        prebuiltPackages.append(prebuiltPackage)

    # check for already existing prebuilt packages
    newCpPList = packageList.copy()
    for package in newCpPList:
        # remove package paths
        prePackage = re.sub('.*/', '', package)
        for prebuiltPackage in prebuiltPackages:
            rePrePackage = re.sub('\+', '\+', prePackage)
            if (re.match(rePrePackage, prebuiltPackage)):
                if (int(mainConfig.getMainConfig('verbosity')) > 2):
                    # debugging only
                    print("Removing package:", package)
                mail.addExistingPackage(package)
                packageList.discard(package)

    # exit if there are no packages to update
    if (len(packageList) == 0):
        if (mainConfig.getMainConfig('rerunconfig')):
            os.execv('/etc/cron.daily/glcu', [
                '/etc/cron.daily/glcu',
                mainConfig.getMainConfig('rerunconfig')
            ])
        else:
            if (int(mainConfig.getMainConfig('verbosity')) > 2):
                # debugging only
                print('no packages to update. Exiting...')
            sys.exit(0)

    # check for packages with dependencies (which can't be prebuilt)
    depCpPList = packageList.copy()
    for depPackage in depCpPList:
        dependencyCheckCommand = 'emerge --pretend =' + depPackage
        checkDep = ShellExecution(dependencyCheckCommand)
        if (len(re.findall("\[ebuild|\[blocks", checkDep.getOutput())) != 1):
            if (int(mainConfig.getMainConfig('verbosity')) > 2):
                # debugging only
                print("Removing package:", depPackage)
            mail.addPackage(depPackage, -1, checkDep.getOutput())
            packageList.discard(depPackage)

    # prebuild the packages
    for package in packageList:
        prebuildCommand = 'emerge --buildpkgonly --oneshot =' + package
        prebuild = ShellExecution(prebuildCommand)
        mail.addPackage(package, prebuild.getExitStatus(), prebuild.getOutput())

    # 7. save status to a tmpfile for easy system update
    #    and send an eMail with the report and a HowTo to update your system
    mail.writeMail()

    if (mainConfig.getMainConfig('rerunconfig')):
        os.execv(
            '/etc/cron.daily/glcu',
            ['/etc/cron.daily/glcu', mainConfig.getMainConfig('rerunconfig')])
    if (int(mainConfig.getMainConfig('verbosity')) > 2):
        print("\ncronExecution finished!\n\n")
def apt_sync(self, repo):
    """
    Handle copying of http:// and ftp:// debian repos.
    """
    # warn about not having the mirror program.
    mirror_program = "/usr/bin/debmirror"
    if not os.path.exists(mirror_program):
        utils.die(self.logger, "no %s found, please install it" % (mirror_program))

    cmd = ""                  # command to run

    # detect cases that require special handling
    if repo.rpm_list != "" and repo.rpm_list != []:
        utils.die(self.logger, "has_rpm_list not yet supported on apt repos")

    if not repo.arch:
        utils.die(self.logger, "Architecture is required for apt repositories")

    # build the destination path for the repo
    dest_path = os.path.join("/var/www/cobbler/repo_mirror", repo.name)

    if repo.mirror_locally:
        # NOTE: Dropping @@suite@@ replace as it is also dropped from
        # manage_import_debian_ubuntu.py due to the fact that repo has no
        # os_version attribute. If it is added again it will break the Web UI!
        # mirror = repo.mirror.replace("@@suite@@",repo.os_version)
        mirror = repo.mirror

        idx = mirror.find("://")
        method = mirror[:idx]
        mirror = mirror[idx + 3:]

        idx = mirror.find("/")
        host = mirror[:idx]
        mirror = mirror[idx:]

        dists = ",".join(repo.apt_dists)
        components = ",".join(repo.apt_components)

        mirror_data = "--method=%s --host=%s --root=%s --dist=%s --section=%s" % (
            pipes.quote(method), pipes.quote(host), pipes.quote(mirror),
            pipes.quote(dists), pipes.quote(components))

        rflags = "--nocleanup"
        for x in repo.yumopts:
            if repo.yumopts[x]:
                rflags += " %s %s" % (x, repo.yumopts[x])
            else:
                rflags += " %s" % x
        # (an unquoted duplicate of this assignment was removed; keep the
        # pipes.quote()d destination path)
        cmd = "%s %s %s %s" % (mirror_program, rflags, mirror_data,
                               pipes.quote(dest_path))
        if repo.arch == "src":
            cmd = "%s --source" % cmd
        else:
            arch = repo.arch
            if arch == "x86":
                arch = "i386"    # FIX potential arch errors
            if arch == "x86_64":
                arch = "amd64"   # FIX potential arch errors
            cmd = "%s --nosource -a %s" % (cmd, arch)

        # Sets an environment variable for the subprocess; otherwise debmirror
        # will fail as it needs this variable to exist.
        # FIXME: might this break anything? So far it doesn't
        os.putenv("HOME", "/var/lib/cobbler")

        rc = utils.subprocess_call(self.logger, cmd)
        if rc != 0:
            utils.die(self.logger, "cobbler reposync failed")
def main():
    # Extract settings from git repo
    repo = git_config_get('githubmerge.repository')
    host = git_config_get('githubmerge.host','*****@*****.**')
    opt_branch = git_config_get('githubmerge.branch',None)
    testcmd = git_config_get('githubmerge.testcmd')
    ghtoken = git_config_get('user.ghtoken')
    signingkey = git_config_get('user.signingkey')
    if repo is None:
        print("ERROR: No repository configured. Use this command to set:", file=stderr)
        print("git config githubmerge.repository <owner>/<repo>", file=stderr)
        sys.exit(1)
    if signingkey is None:
        print("ERROR: No GPG signing key set. Set one using:",file=stderr)
        print("git config --global user.signingkey <key>",file=stderr)
        sys.exit(1)

    if host.startswith(('https:','http:')):
        host_repo = host+"/"+repo+".git"
    else:
        host_repo = host+":"+repo

    # Extract settings from command line
    args = parse_arguments()
    pull = str(args.pull[0])

    # Receive pull information from github
    info = retrieve_pr_info(repo,pull,ghtoken)
    if info is None:
        sys.exit(1)
    title = info['title'].strip()
    body = info['body'].strip()
    # precedence order for destination branch argument:
    #   - command line argument
    #   - githubmerge.branch setting
    #   - base branch for pull (as retrieved from github)
    #   - 'master'
    branch = args.branch or opt_branch or info['base']['ref'] or 'master'

    # Initialize source branches
    head_branch = 'pull/'+pull+'/head'
    base_branch = 'pull/'+pull+'/base'
    merge_branch = 'pull/'+pull+'/merge'
    local_merge_branch = 'pull/'+pull+'/local-merge'

    devnull = open(os.devnull, 'w', encoding="utf8")
    try:
        subprocess.check_call([GIT,'checkout','-q',branch])
    except subprocess.CalledProcessError:
        print("ERROR: Cannot check out branch %s." % (branch), file=stderr)
        sys.exit(3)
    try:
        subprocess.check_call([GIT,'fetch','-q',host_repo,
                               '+refs/pull/'+pull+'/*:refs/heads/pull/'+pull+'/*',
                               '+refs/heads/'+branch+':refs/heads/'+base_branch])
    except subprocess.CalledProcessError:
        print("ERROR: Cannot find pull request #%s or branch %s on %s." % (pull,branch,host_repo), file=stderr)
        sys.exit(3)
    try:
        subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+head_branch], stdout=devnull, stderr=stdout)
    except subprocess.CalledProcessError:
        print("ERROR: Cannot find head of pull request #%s on %s." % (pull,host_repo), file=stderr)
        sys.exit(3)
    try:
        subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+merge_branch], stdout=devnull, stderr=stdout)
    except subprocess.CalledProcessError:
        print("ERROR: Cannot find merge of pull request #%s on %s." % (pull,host_repo), file=stderr)
        sys.exit(3)
    subprocess.check_call([GIT,'checkout','-q',base_branch])
    subprocess.call([GIT,'branch','-q','-D',local_merge_branch], stderr=devnull)
    subprocess.check_call([GIT,'checkout','-q','-b',local_merge_branch])

    try:
        # Go up to the repository's root.
        toplevel = subprocess.check_output([GIT,'rev-parse','--show-toplevel']).strip()
        os.chdir(toplevel)
        # Create unsigned merge commit.
        if title:
            firstline = 'Merge #%s: %s' % (pull,title)
        else:
            firstline = 'Merge #%s' % (pull,)
        message = firstline + '\n\n'
        message += subprocess.check_output([GIT,'log','--no-merges','--topo-order',
                                            '--pretty=format:%h %s (%an)',
                                            base_branch+'..'+head_branch]).decode('utf-8')
        message += '\n\nPull request description:\n\n ' + body.replace('\n', '\n ') + '\n'
        try:
            subprocess.check_call([GIT,'merge','-q','--commit','--no-edit','--no-ff',
                                   '-m',message.encode('utf-8'),head_branch])
        except subprocess.CalledProcessError:
            print("ERROR: Cannot be merged cleanly.",file=stderr)
            subprocess.check_call([GIT,'merge','--abort'])
            sys.exit(4)
        logmsg = subprocess.check_output([GIT,'log','--pretty=format:%s','-n','1']).decode('utf-8')
        if logmsg.rstrip() != firstline.rstrip():
            print("ERROR: Creating merge failed (already merged?).",file=stderr)
            sys.exit(4)

        symlink_files = get_symlink_files()
        for f in symlink_files:
            print("ERROR: File %s was a symlink" % f)
        if len(symlink_files) > 0:
            sys.exit(4)

        # Put tree SHA512 into the message
        try:
            first_sha512 = tree_sha512sum()
            message += '\n\nTree-SHA512: ' + first_sha512
        except subprocess.CalledProcessError:
            print("ERROR: Unable to compute tree hash")
            sys.exit(4)
        try:
            subprocess.check_call([GIT,'commit','--amend','-m',message.encode('utf-8')])
        except subprocess.CalledProcessError:
            print("ERROR: Cannot update message.", file=stderr)
            sys.exit(4)

        print_merge_details(pull, title, branch, base_branch, head_branch)
        print()

        # Run test command if configured.
        if testcmd:
            if subprocess.call(testcmd,shell=True):
                print("ERROR: Running %s failed." % testcmd,file=stderr)
                sys.exit(5)

            # Show the created merge.
            diff = subprocess.check_output([GIT,'diff',merge_branch+'..'+local_merge_branch])
            subprocess.check_call([GIT,'diff',base_branch+'..'+local_merge_branch])
            if diff:
                print("WARNING: merge differs from github!",file=stderr)
                reply = ask_prompt("Type 'ignore' to continue.")
                if reply.lower() == 'ignore':
                    print("Difference with github ignored.",file=stderr)
                else:
                    sys.exit(6)
        else:
            # Verify the result manually.
            print("Dropping you on a shell so you can try building/testing the merged source.",file=stderr)
            print("Run 'git diff HEAD~' to show the changes being merged.",file=stderr)
            print("Type 'exit' when done.",file=stderr)
            if os.path.isfile('/etc/debian_version'): # Show pull number on Debian default prompt
                os.putenv('debian_chroot',pull)
            subprocess.call([BASH,'-i'])

        second_sha512 = tree_sha512sum()
        if first_sha512 != second_sha512:
            print("ERROR: Tree hash changed unexpectedly",file=stderr)
            sys.exit(8)

        # Sign the merge commit.
        print_merge_details(pull, title, branch, base_branch, head_branch)
        while True:
            reply = ask_prompt("Type 's' to sign off on the above merge, or 'x' to reject and exit.").lower()
            if reply == 's':
                try:
                    subprocess.check_call([GIT,'commit','-q','--gpg-sign','--amend','--no-edit'])
                    break
                except subprocess.CalledProcessError:
                    print("Error while signing, asking again.",file=stderr)
            elif reply == 'x':
                print("Not signing off on merge, exiting.",file=stderr)
                sys.exit(1)

        # Put the result in branch.
        subprocess.check_call([GIT,'checkout','-q',branch])
        subprocess.check_call([GIT,'reset','-q','--hard',local_merge_branch])
    finally:
        # Clean up temporary branches.
        subprocess.call([GIT,'checkout','-q',branch])
        subprocess.call([GIT,'branch','-q','-D',head_branch],stderr=devnull)
        subprocess.call([GIT,'branch','-q','-D',base_branch],stderr=devnull)
        subprocess.call([GIT,'branch','-q','-D',merge_branch],stderr=devnull)
        subprocess.call([GIT,'branch','-q','-D',local_merge_branch],stderr=devnull)

    # Push the result.
    while True:
        reply = ask_prompt("Type 'push' to push the result to %s, branch %s, or 'x' to exit without pushing." % (host_repo,branch)).lower()
        if reply == 'push':
            subprocess.check_call([GIT,'push',host_repo,'refs/heads/'+branch])
            break
        elif reply == 'x':
            sys.exit(1)
    'zoo',
)

# Mixpanel settings
BROKER_BACKEND = 'django'
CELERY_ALWAYS_EAGER = True
MIXPANEL_API_TOKEN = '29a29954d34fd707f1118f09b6b64a8f'

import djcelery
djcelery.setup_loader()

# CasperJS settings
PHANTOMJS_BIN = os.path.abspath(
    os.path.join(BASE_DIR, '../utils/phantomjs/bin/phantomjs'))

# Need this for the casperjs binary
os.putenv('PHANTOMJS_EXECUTABLE', PHANTOMJS_BIN)

CASPERJS_BIN = os.path.abspath(
    os.path.join(BASE_DIR, '../utils/casperjs/bin/casperjs'))

FETCH_SCHEDULE_SCRIPT = os.path.abspath(
    os.path.join(BASE_DIR, '../utils/scripts/schedule.js'))

# Ldap settings
LDAP_SERVER = 'ldaps://directory.uoguelph.ca'

# Read the Docs documentation URL
READ_THE_DOCS_URL = 'http://guelphdev-api-service.readthedocs.org/en/latest/'

# Try to import our local settings (for database, etc)
# DO NOT remove this
try:
    from local_settings import *
except ImportError:
    pass
def os_func():
    '''
    Operating system module.

    The compatibility of the methods in this module across versions is
    unclear, so use them with caution.
    Tested with:
        Python: 3.6.1
        Windows: Windows 10, 64 bit
    '''
    # === System ===
    strs = os.name  # current OS: Linux 'posix' / Windows 'nt' / 'ce' / 'java'
    strs = os.sep  # path separator \\ (windows: \\  linux: /)
    strs = os.pathsep  # PATH variable separator ; (windows: ;  linux: :)
    strs = os.linesep  # line separator \r\n (windows: \r\n  linux: \n)
    dics = os.environ  # the system environment variables (a dict)
    strs = os.getenv("Path", default=-1)  # read an environment variable; returns None if absent
    os.putenv("Path", "C:\\python")  # set an environment variable (no effect on Windows)
    os.unsetenv("Path")  # delete an environment variable (not usable on Windows)
    strs = os.getlogin()  # name of the logged-in user
    num = os.getpid()  # PID of the current process
    num = os.system("cmd")  # run an OS command; returns 0/1 (0: success; 1: error)
    strs = os.popen("dir").read()  # run a system command and read its output
    tups = os.times()  # process times (user / system / children_user (0 on windows) / children_system (0 on windows) / elapsed)
    bytes = os.urandom(10)  # n random bytes suitable for cryptographic use
    num = os.cpu_count()  # number of CPUs

    # === Processes ===
    os.abort()  # terminate the process
    # execl(file, *args) / execle / execlp / execlpe / execv / execve / execvp / execvpe
    # run a new program, replacing the current process
    os.execl(r"C:\python", 'python', 'hello.py', 'i')  # (fails on windows)
    os._exit(0)  # exit
    os.kill(8480, signal.SIGTERM)  # (system) kill a process (requires: import signal)
    # SIGINT / SIGTERM / SIGKILL (terminate a process) / SIGALRM (alarm signal)

    # === Files / Directories ===
    strs = os.getcwd()  # current working directory
    bytes = os.getcwdb()  # current working directory (bytes)
    os.chdir(r"C:\Windows")  # change directory
    strs = os.curdir  # current directory .
    strs = os.pardir  # parent directory ..
    strs = os.sep  # path separator ('/' or '\\')
    bytes = os.fsencode(r"C:\c.obj")  # (encode) path string to bytes => b'C:\\c.obj'
    strs = os.fsdecode(b"C:\c.obj")  # (decode) path to str => 'C:\\c.obj'
    # chmod(path, mode, *, dir_fd=None, follow_symlinks=True)
    os.chmod(r"C:\python\hello.py", 777)  # change the mode
    os.link("file.txt", "file.txt.link")  # create a hard link
    os.symlink("file.txt", "file.txt.link")  # create a symbolic link (fails on windows)
    lists = os.listdir()  # all files and directories (list)
    # lstat(path, *, dir_fd=None)
    tups = os.lstat(r"C:\c.obj")  # status info
    boolean = os.access(r"C:\c.obj", os.F_OK)
    # (file/dir) permission test (mode: F_OK exists? / R_OK readable? / W_OK writable? / X_OK executable?)
    # scandir(path='.') -- iterator of DirEntry objects for the directory
    lists = os.scandir()
    # st_atime (access time) / st_ctime (metadata change time) / st_mtime (modification time) / st_size (size in bytes)
    # st_uid (user ID) / st_gid (group ID)
    # st_ino (inode) / st_mode (mode) / st_dev (device) / st_nlink (hard links)
    # count = os.cpu_count()  # (system) number of CPU threads (not cores)
    tups = os.stat(".")  # get status info; returns a stat_result object
    # utime(path, times=None, *, ns=None, dir_fd=None, follow_symlinks=True) -- update access/modification times
    num = os.utime(r"C:\c.obj")
    # walk(top, topdown=True, onerror=None, followlinks=False)
    # walk the directory tree rooted at top; yields (dirpath, dirnames[], filenames[]) tuples,
    # so it must be iterated rather than unpacked directly
    for root, dirnames, filenames in os.walk(r"C:\python"):
        pass
    os.removedirs(r"c:\python")  # remove directories recursively (on windows removing several levels fails; a single one works)
    # mkdir(path, mode=0o777, *, dir_fd=None) -- create a single directory; raises FileExistsError if it already exists
    os.mkdir("test")
    # makedirs(name, mode=0o777, exist_ok=False) -- create directories (recursively)
    os.makedirs(r"./t1/t2/t3")
    os.rmdir("test")  # remove a single directory
    # mknod(path, mode=0o600, device=0, *, dir_fd=None) -- create an empty file (not usable on windows)
    os.mknod("test.txt")
    # remove(path, *, dir_fd=None)
    os.remove("test.txt")  # delete a file
    # rename(src, dst, *, src_dir_fd=None, dst_dir_fd=None)
    os.rename("text.txt", "file.txt")  # rename
    os.renames("text.txt", "file.txt")
    # replace(src, dst, *, src_dir_fd=None, dst_dir_fd=None) -- rename, replacing the destination if it exists
    os.replace("text.txt", "file.txt")
    tups = os.stat(r"text.txt")  # file attributes

    # === File I/O ===
    # open(path, flags, mode=0o777, *, dir_fd=None) -- open a file; fd is a file descriptor
    fd = os.open(r"C:\c.obj", os.O_RDWR | os.O_CREAT)
    readfd, writefd = os.pipe()  # open a pipe; returns (read fd, write fd) (fails on windows)
    # fdopen(fd, *args, **kwargs) -- open (on windows writing fails; reads return "")
    f = os.fdopen(readfd)
    os.read(fd, 150)  # read
    os.write(fd, "String".encode("utf-8"))  # write
    os.fsync(fd)  # force the write to disk
    os.ftruncate(fd, 100)  # truncate the file
    bytes = os.lseek(fd, 10, os.SEEK_SET)  # set the file pointer: SEEK_SET (0, from start) / SEEK_CUR (1, from current) / SEEK_END (2, from end)
    fd_temp = os.dup(fd)  # duplicate the descriptor
    boolean = os.isatty(fd)  # is it a tty device?
    stat = os.fstat(fd)  # status info
    strs = os.device_encoding(fd)  # the terminal's character encoding; None if not a terminal
    os.close(fd)  # close
    os.closerange(fd, fd)  # close a range of descriptors, ignoring errors

    # === DirEntry ===
    for dir in os.scandir():
        strs = dir.name  # file name
        strs = dir.path  # full path
        num = dir.inode()  # inode number
        boolean = dir.is_dir()  # is it a directory?
        boolean = dir.is_file()  # is it a file?
        boolean = dir.is_symlink()  # is it a symlink?
        tups = dir.stat()  # stat_result object with status info
def call(*args, **kargs):
    """Call a subprocess with subprocess.call, but set the PWD environment
    variable to the current working directory first."""
    os.putenv('PWD', os.getcwd())
    return subprocess.call(*args, **kargs)
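An equivalent sketch that avoids mutating the global environment: build a copy of os.environ with PWD overridden and hand it to subprocess via the env= parameter. The wrapper name call_with_pwd is illustrative, not from the original.

import os
import subprocess

def call_with_pwd(*args, **kwargs):
    # merged copy: the child sees PWD without the parent's env being touched
    env = dict(os.environ, PWD=os.getcwd())
    kwargs.setdefault('env', env)
    return subprocess.call(*args, **kwargs)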
    print space.select(i)

# master server
master = server

# Re-deploy server to cleanup Sophia data
master.stop()
master.cleanup()
master.deploy()
master.admin("box.schema.user.create('%s', { password = '%s'})" %
             (LOGIN, PASSWORD))
master.admin("box.schema.user.grant('%s', 'read,write,execute', 'universe')" %
             LOGIN)
master.iproto.py_con.authenticate(LOGIN, PASSWORD)
master.uri = '%s:%s@%s' % (LOGIN, PASSWORD, master.iproto.uri)
os.putenv('MASTER', master.uri)

# replica server
replica = TarantoolServer()
replica.script = "replication/replica.lua"
replica.vardir = server.vardir  #os.path.join(server.vardir, 'replica')
replica.deploy()
replica.admin(
    "while box.info.server.id == 0 do require('fiber').sleep(0.01) end")
replica.uri = '%s:%s@%s' % (LOGIN, PASSWORD, replica.iproto.uri)
replica.admin(
    "while box.space['_priv']:len() < 1 do require('fiber').sleep(0.01) end")
replica.iproto.py_con.authenticate(LOGIN, PASSWORD)

for engine in engines:
    master.admin("s = box.schema.space.create('%s', { engine = '%s'})" %
import os

from Utils.utils import decodeImage
from flask import Flask, request, jsonify, render_template
from flask_cors import CORS, cross_origin
from predict import malaria

os.putenv('LANG', 'en_US.UTF-8')
os.putenv('LC_ALL', 'en_US.UTF-8')

app = Flask(__name__, template_folder='templates', static_folder='assets')
CORS(app)
app.config['DEBUG'] = True


# @cross_origin()
class ClientApp:
    def __init__(self):
        self.filename = "inputImage.jpg"
        self.classifier = malaria(self.filename)


@app.route("/", methods=['GET'])
@cross_origin()
def homePage():
    return render_template('index.html')


@app.route("/predict", methods=['POST'])
@cross_origin()
import os

import pygame

task_states = {
    'begin': 1,
    'active': 2,
    'hold': 3,
    'success': 4,
    'fail': 5,
    'end': 6
}

#LICORICE_ROOT = os.environ['LICORICE_ROOT']  # this sadly doesn't work because the env isn't present when timer calls it
LICORICE_ROOT = '../../licorice'  # this is fragile, need a better way to do this
AUDIO_PATH = 'examples/media'

os.putenv('AUDIODRIVER', 'alsa')
#os.putenv('AUDIODEV', 'plughw:0,1')  # set this to the card,device to output sound to

pygame.mixer.pre_init(buffer=512)
pygame.mixer.init()
sound_success = pygame.mixer.Sound(
    os.path.join(LICORICE_ROOT, AUDIO_PATH, 'CEGC_success_glockenspiel.wav'))
sound_fail = pygame.mixer.Sound(
    os.path.join(LICORICE_ROOT, AUDIO_PATH, 'C#C_failure.wav'))

state_prev = 0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import glob
import subprocess

os.putenv("SAC_DISPLAY_COPYRIGHT", '0')

if len(sys.argv) != 2:
    sys.exit("Usage: python %s dirname" % sys.argv[0])

dir = sys.argv[1]
os.chdir(dir)  # cd into the data directory to avoid dealing with paths

for seed in glob.glob("*.seed"):
    subprocess.call(['rdseed', '-pdf', seed])

os.chdir("..")
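A variant sketch that avoids changing the interpreter's working directory at all: subprocess accepts a cwd= parameter, and glob can search inside the directory directly. The helper name extract_seeds is illustrative.

import glob
import os
import subprocess

def extract_seeds(dirname):
    for seed in glob.glob(os.path.join(dirname, "*.seed")):
        # run rdseed inside dirname so its output lands there
        subprocess.call(['rdseed', '-pdf', os.path.basename(seed)], cwd=dirname)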
#!/usr/bin/env python
# this script creates a gzipped archive of a postgresql database
import ConfigParser, os, time, sys

try:
    sys.argv[1], sys.argv[2]
except:
    sys.exit("Usage of this script: " + sys.argv[0] +
             " database path_to_location_gzipped_backup")

data = time.strftime("%d%m-%H_%M")  # this is a timestamp
db = sys.argv[1]
path = sys.argv[2] + '/'

# parsing configfile for credentials
config = ConfigParser.ConfigParser()
config.read("/backup/postgres.cnf")
p_username = config.get('backups', 'user')
p_password = config.get('backups', 'password')
p_hostname = config.get('backups', 'host')
p_port = config.get('backups', 'port')

# pg_dump reads the password from the PGPASSWORD environment variable
os.putenv('PGPASSWORD', p_password)
#print p_username, p_password, p_hostname, p_port
#script = os.system('pg_dump -h127.0.0.1 -Ubackup yachad | gzip >' + data + 'pgdump.zip')
# use the credentials parsed above instead of hardcoded host/user values
execution = 'pg_dump -h%s -p%s -U%s %s | gzip > %s%s_%s.gzip' % (
    p_hostname, p_port, p_username, db, path, data, db)
script = os.system(execution)
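A hedged sketch of the same dump using subprocess with an explicit env dict, which scopes the password to the child process and avoids building a shell string. The function name dump_database is illustrative; the parameters mirror the variables parsed above.

import gzip
import os
import subprocess

def dump_database(db, out_path, host, port, user, password):
    # PGPASSWORD is visible only to this pg_dump invocation
    env = dict(os.environ, PGPASSWORD=password)
    with gzip.open(out_path, 'wb') as out:
        proc = subprocess.Popen(
            ['pg_dump', '-h', host, '-p', port, '-U', user, db],
            stdout=subprocess.PIPE, env=env)
        # stream-compress the dump instead of piping through a shell
        for chunk in iter(lambda: proc.stdout.read(65536), b''):
            out.write(chunk)
        proc.wait()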
def handle(self, *args, **options):
    self.stdout.write('html: %s' % options['html'].split(','))
    self.stdout.write('input: %s' % options['input'])
    self.stdout.write('size: %s' % options['size'])
    self.stdout.write('wif: %s' % options['wif'])
    self.stdout.write('change: %s' % options['change'])

    htmlFiles = options['html'].split(',')
    inputCoins = options['input']
    unitSize = options['size']
    signingWif = options['wif']
    changeAddress = options['change']

    # STEP 1: load all of the voting card bitcoin addresses from HTML files
    btcAddresses = []
    for htmlFile in htmlFiles:
        # TODO: use a function here to scrape the stuff.
        # this function should return a list of bitcoin addresses!!!
        btcAddresses += scrapeAddresses(htmlFile)

    # STEP 2: build the transaction with all bitcoin addresses as the
    # transaction outputs.
    line = ['tx', '-i', inputCoins]

    # add each voting card bitcoin address to the command 'line', followed
    # by the amount of satoshi units.
    for address in btcAddresses:
        line += ['%s/%s' % (address, unitSize)]
    line += [changeAddress]

    # output of the unsigned transaction hex
    unsignedTxFile = '%s_%s.unsigned.tx' % (inputCoins, changeAddress)
    line += ['-o', unsignedTxFile]

    # STEP 3: finally -- build the transaction!
    # TODO:
    # use Popen to run this baby? maybe a better solution since we do not
    # need the text from the transaction creation...
    #self.stdout.write('Command for building the transaction: {}'.format(
    #    ' '.join(line)
    #))
    environment = (
        ('PYCOIN_CACHE_DIR', '~/.pycoin_cache'),
        ('PYCOIN_SERVICE_PROVIDERS',
         'BLOCKR_IO:BLOCKCHAIN_INFO:BITEASY:BLOCKEXPLORER'),
    )
    for (k, v) in environment:
        os.putenv(k, v)
    subprocess.call(line, shell=False, stdout=subprocess.PIPE)

    # STEP 4: sign the transaction, with the output going directly to
    # standard output.
    signedTxFile = str(unsignedTxFile).replace('unsigned', 'signed')
    # line.pop()
    line = ['tx', str(unsignedTxFile), signingWif, '-o', signedTxFile]
    # TODO: send the 'line' to the system command line, and allow the output
    # to be displayed on the screen.
    #print subprocess.Popen("echo Hello World", shell=True, stdout=subprocess.PIPE).stdout.read()
    subprocess.call(line, shell=False, stdout=subprocess.PIPE)
    #print result

    line = ['tx', signedTxFile]
    result = subprocess.Popen(line, shell=False, stdout=subprocess.PIPE)
    with result.stdout as f:
        output = f.read()
    return output
def setup():
    os.putenv('PWD', pwd)
    os.chdir(pwd)
def makeCaFiles(dir, njobs=-1, jobid=0, nf=[0], maxfiles=-1, site="cern.ch"):
    dir = str(dir)
    return_files = []
    try:
        ld_path = os.getenv("LD_LIBRARY_PATH")
    except:
        ld_path = ""
    if not "/afs/cern.ch/project/eos/installation/pro/lib64/" in ld_path:
        os.putenv(
            "LD_LIBRARY_PATH", "%s:%s" %
            (ld_path, "/afs/cern.ch/project/eos/installation/pro/lib64/"))
    # initialise all site-handling fields so the castor branch does not leave
    # field/prepend_dir undefined below
    replace = None
    ls = None
    prepend = None
    prepend_dir = None
    field = None
    if dir.startswith("/castor"):
        ls = 'nsls %s'
        prepend = 'rfio://'
    else:
        sh = siteHandling[site]
        while type(sh) == str:
            sh = siteHandling[sh]
        ls = sh["ls"]
        prepend = sh.get("prepend", None)
        prepend_dir = sh.get("prepend_dir", None)
        replace = sh.get("replace", None)
        field = sh.get("field", None)
    sc = None
    for i in range(3):
        sc, flist = commands.getstatusoutput(ls % dir)
        if not sc:  # getstatusoutput returns 0 on success; retry on failure
            break
    files = flist.split('\n')
    if field:
        tmp = []
        for f in files:
            toks = [t for t in f.split(" ") if t != ""]
            print toks
            if len(toks) > field:
                tmp.append(toks[field])
        files = tmp
    files.sort()
    if not sc:
        ifile = 0
        for f in files:
            if '.root' in f:
                if (maxfiles > 0 and ifile >= maxfiles):
                    break
                ifile += 1
                nf[0] += 1
                fname = f
                if replace:
                    fname = fname.replace(*replace)
                if prepend:
                    if prepend_dir:
                        fname = "%s%s/%s" % (prepend, dir, fname)
                    else:
                        fname = "%s%s" % (prepend, fname)
                if (njobs > 0) and (nf[0] % njobs != jobid):
                    return_files.append((fname, False))
                else:
                    return_files.append((fname, True))
    else:
        sys.exit("No Such Directory: %s\n%s\n%s" % (dir, flist, str(files)))
    if nf[0] == 0:
        sys.exit("No .root Files found in directory - %s:\n%s" % (dir, flist))
    return return_files
app.register_blueprint(sse, url_prefix='/stream')

runningProcs = []

# Load classification model
clf = load('personClassifier.joblib')

# low range of the sensor (this will be blue on the screen)
MINTEMP = 18

# high range of the sensor (this will be red on the screen)
MAXTEMP = 30

# how many color values we can have
COLORDEPTH = 1024

# direct SDL output to the secondary framebuffer device
os.putenv('SDL_FBDEV', '/dev/fb1')

# initialize the sensor
sensor = Adafruit_AMG88xx()

points = [(math.floor(ix / 8), (ix % 8)) for ix in range(0, 64)]
grid_x, grid_y = np.mgrid[0:7:32j, 0:7:32j]

# sensor is an 8x8 grid so lets do a square
height = 240
width = 240

# the list of colors we can choose from
blue = Color("indigo")
colors = list(blue.range_to(Color("red"), COLORDEPTH))
def runGdal(commands, feedback=None):
    if feedback is None:
        feedback = QgsProcessingFeedback()
    envval = os.getenv('PATH')
    # We need to give some extra hints to get things picked up on OS X
    isDarwin = False
    try:
        isDarwin = platform.system() == 'Darwin'
    except IOError:  # https://travis-ci.org/m-kuhn/QGIS#L1493-L1526
        pass
    if isDarwin and os.path.isfile(
            os.path.join(QgsApplication.prefixPath(), "bin", "gdalinfo")):
        # Looks like there's a bundled gdal. Let's use it.
        os.environ['PATH'] = "{}{}{}".format(
            os.path.join(QgsApplication.prefixPath(), "bin"), os.pathsep,
            envval)
        os.environ['DYLD_LIBRARY_PATH'] = os.path.join(
            QgsApplication.prefixPath(), "lib")
    else:
        # Other platforms should use default gdal finder codepath
        settings = QgsSettings()
        path = settings.value('/GdalTools/gdalPath', '')
        if not path.lower() in envval.lower().split(os.pathsep):
            envval += '{}{}'.format(os.pathsep, path)
            os.putenv('PATH', envval)

    fused_command = ' '.join([str(c) for c in commands])
    QgsMessageLog.logMessage(fused_command, 'Processing', Qgis.Info)
    feedback.pushInfo('GDAL command:')
    feedback.pushCommandInfo(fused_command)
    feedback.pushInfo('GDAL command output:')
    success = False
    retry_count = 0
    while not success:
        loglines = []
        loglines.append('GDAL execution console output')
        try:
            with subprocess.Popen(
                    fused_command,
                    shell=True,
                    stdout=subprocess.PIPE,
                    stdin=subprocess.DEVNULL,
                    stderr=subprocess.STDOUT,
                    universal_newlines=True,
            ) as proc:
                for line in proc.stdout:
                    feedback.pushConsoleInfo(line)
                    loglines.append(line)
                success = True
        except IOError as e:
            if retry_count < 5:
                retry_count += 1
            else:
                raise IOError(
                    str(e) +
                    u'\nTried 5 times without success. Last iteration stopped after reading {} line(s).\nLast line(s):\n{}'
                    .format(len(loglines), u'\n'.join(loglines[-10:])))

    QgsMessageLog.logMessage('\n'.join(loglines), 'Processing', Qgis.Info)
    GdalUtils.consoleOutput = loglines
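A small sketch of the PATH-extension pattern used above. Assigning through os.environ (rather than os.putenv) keeps os.getenv() and child processes consistent; the directory argument below is a hypothetical example, and the case-insensitive duplicate check mirrors the snippet's own comparison.

import os

def prepend_to_path(directory):
    current = os.environ.get('PATH', '')
    parts = current.split(os.pathsep) if current else []
    # avoid adding the same entry twice (case-insensitive, as above)
    if directory.lower() not in [p.lower() for p in parts]:
        os.environ['PATH'] = directory + os.pathsep + current

prepend_to_path('/usr/local/opt/gdal/bin')  # hypothetical location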
def main():
    cur_platform = '??'
    llvm_path = '??'
    ndk_root = _check_ndk_root_env()
    # del the " in the path
    ndk_root = re.sub(r"\"", "", ndk_root)
    python_bin = _check_python_bin_env()

    platform = sys.platform
    if platform == 'win32':
        cur_platform = 'windows'
    elif platform == 'darwin':
        cur_platform = platform
    elif 'linux' in platform:
        cur_platform = 'linux'
    else:
        print 'Your platform is not supported!'
        sys.exit(1)

    if platform == 'win32':
        x86_llvm_path = os.path.abspath(
            os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt',
                         '%s' % cur_platform))
    else:
        x86_llvm_path = os.path.abspath(
            os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt',
                         '%s-%s' % (cur_platform, 'x86')))
    x64_llvm_path = os.path.abspath(
        os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt',
                     '%s-%s' % (cur_platform, 'x86_64')))

    if os.path.isdir(x86_llvm_path):
        llvm_path = x86_llvm_path
    elif os.path.isdir(x64_llvm_path):
        llvm_path = x64_llvm_path
    else:
        print 'llvm toolchain not found!'
        print 'path: %s or path: %s are not valid! ' % (x86_llvm_path,
                                                        x64_llvm_path)
        sys.exit(1)

    project_root = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    cocos_root = os.path.abspath(os.path.join(project_root, ''))
    cxx_generator_root = os.path.abspath(
        os.path.join(project_root, 'tools/bindings-generator'))

    # save config to file
    config = ConfigParser.ConfigParser()
    config.set('DEFAULT', 'androidndkdir', ndk_root)
    config.set('DEFAULT', 'clangllvmdir', llvm_path)
    config.set('DEFAULT', 'cocosdir', cocos_root)
    config.set('DEFAULT', 'cxxgeneratordir', cxx_generator_root)
    config.set('DEFAULT', 'extra_flags', '')

    # To fix parse error on windows, we must define __WCHAR_MAX__ and undefine __MINGW32__ .
    if platform == 'win32':
        config.set('DEFAULT', 'extra_flags',
                   '-D__WCHAR_MAX__=0x7fffffff -U__MINGW32__')

    conf_ini_file = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'userconf.ini'))

    print 'generating userconf.ini...'
    with open(conf_ini_file, 'w') as configfile:
        config.write(configfile)

    # set proper environment variables
    if 'linux' in platform or platform == 'darwin':
        os.putenv('LD_LIBRARY_PATH', '%s/libclang' % cxx_generator_root)
    if platform == 'win32':
        path_env = os.environ['PATH']
        os.putenv(
            'PATH', r'%s;%s\libclang;%s\tools\win32;' %
            (path_env, cxx_generator_root, cxx_generator_root))

    try:
        tolua_root = '%s/tools/tolua' % project_root
        output_dir = '%s/cocos/scripting/lua-bindings/auto' % project_root
        cmd_args = {
            # 'cocos2dx.ini' : ('cocos2d-x', 'lua_cocos2dx_auto'), \
            # 'cocos2dx_extension.ini' : ('cocos2dx_extension', 'lua_cocos2dx_extension_auto'), \
            # 'cocos2dx_ui.ini' : ('cocos2dx_ui', 'lua_cocos2dx_ui_auto'), \
            # 'cocos2dx_studio.ini' : ('cocos2dx_studio', 'lua_cocos2dx_studio_auto'), \
            # 'cocos2dx_spine.ini' : ('cocos2dx_spine', 'lua_cocos2dx_spine_auto'), \
            # 'cocos2dx_physics.ini' : ('cocos2dx_physics', 'lua_cocos2dx_physics_auto'), \
            # 'cocos2dx_experimental_video.ini' : ('cocos2dx_experimental_video', 'lua_cocos2dx_experimental_video_auto'), \
            # 'cocos2dx_experimental.ini' : ('cocos2dx_experimental', 'lua_cocos2dx_experimental_auto'), \
            # 'cocos2dx_controller.ini' : ('cocos2dx_controller', 'lua_cocos2dx_controller_auto'), \
            'qt_bridge.ini': ('qt_bridge', 'lua_qt_bridge_auto'),
        }
        target = 'lua'
        generator_py = '%s/generator.py' % cxx_generator_root
        for key in cmd_args.keys():
            args = cmd_args[key]
            cfg = '%s/%s' % (tolua_root, key)
            print 'Generating bindings for %s...' % (key[:-4])
            command = '%s %s %s -s %s -t %s -o %s -n %s' % (
                python_bin, generator_py, cfg, args[0], target, output_dir,
                args[1])
            _run_cmd(command)

        if platform == 'win32':
            with _pushd(output_dir):
                _run_cmd('dos2unix *')

        print '---------------------------------'
        print 'Generating lua bindings succeeds.'
        print '---------------------------------'

    except Exception as e:
        if e.__class__.__name__ == 'CmdError':
            print '---------------------------------'
            print 'Generating lua bindings fails.'
            print '---------------------------------'
            sys.exit(1)
        else:
            raise
def download(self, url, error_message, timeout, tries, prefer_cached=False):
    """
    Downloads a URL and returns the contents

    :param url:
        The URL to download

    :param error_message:
        A string to include in the console error that is printed
        when an error occurs

    :param timeout:
        The int number of seconds to set the timeout to

    :param tries:
        The int number of times to try and download the URL in the case of
        a timeout or HTTP 503 error

    :param prefer_cached:
        If a cached version should be returned instead of trying a new request

    :raises:
        RateLimitException: when a rate limit is hit
        DownloaderException: when any other download error occurs

    :return:
        The string contents of the URL
    """

    if prefer_cached:
        cached = self.retrieve_cached(url)
        if cached:
            return cached

    self.tmp_file = tempfile.NamedTemporaryFile().name
    command = [
        self.wget, '--connect-timeout=' + str_cls(int(timeout)), '-o',
        self.tmp_file, '-O', '-', '--secure-protocol=TLSv1'
    ]

    user_agent = self.settings.get('user_agent')
    if user_agent:
        command.extend(['-U', user_agent])

    request_headers = {
        # Don't be alarmed if the response from the server does not select
        # one of these since the server runs a relatively new version of
        # OpenSSL which supports compression on the SSL layer, and Apache
        # will use that instead of HTTP-level encoding.
        'Accept-Encoding': self.supported_encodings()
    }
    request_headers = self.add_conditional_headers(url, request_headers)

    username, password = self.get_username_password(url)
    if username and password:
        command.extend(['--user=%s' % username, '--password=%s' % password])

    for name, value in request_headers.items():
        command.extend(['--header', "%s: %s" % (name, value)])

    secure_url_match = re.match('^https://([^/]+)', url)
    if secure_url_match is not None:
        bundle_path = get_ca_bundle_path(self.settings)
        command.append(u'--ca-certificate=' + bundle_path)

    command.append('-S')
    if self.debug:
        command.append('-d')
    else:
        command.append('-q')

    http_proxy = self.settings.get('http_proxy')
    https_proxy = self.settings.get('https_proxy')
    proxy_username = self.settings.get('proxy_username')
    proxy_password = self.settings.get('proxy_password')

    if self.debug:
        console_write(
            u'''
            Wget Debug Proxy
              http_proxy: %s
              https_proxy: %s
              proxy_username: %s
              proxy_password: %s
            ''', (http_proxy, https_proxy, proxy_username, proxy_password))

    command.append(url)

    if http_proxy:
        os.putenv('http_proxy', http_proxy)
    if https_proxy:
        os.putenv('https_proxy', https_proxy)

    error_string = None
    while tries > 0:
        tries -= 1
        try:
            result = self.execute(command)

            general, headers = self.parse_output(True)
            encoding = headers.get('content-encoding')
            result = self.decode_response(encoding, result)

            result = self.cache_result('get', url, general['status'],
                                       headers, result)

            return result

        except (NonCleanExitError):
            try:
                general, headers = self.parse_output(False)
                self.handle_rate_limit(headers, url)

                if general['status'] == 304:
                    return self.cache_result('get', url, general['status'],
                                             headers, None)

                if general['status'] == 503 and tries != 0:
                    # GitHub and BitBucket seem to rate limit via 503
                    if tries and self.debug:
                        console_write(
                            u'''
                            Downloading %s was rate limited, trying again
                            ''', url)
                    continue

                download_error = 'HTTP error %s' % general['status']

            except (NonHttpError) as e:
                download_error = unicode_from_os(e)

                # GitHub and BitBucket seem to time out a lot
                if download_error.find('timed out') != -1:
                    if tries and self.debug:
                        console_write(
                            u'''
                            Downloading %s timed out, trying again
                            ''', url)
                    continue

            error_string = u'%s %s downloading %s.' % (error_message,
                                                       download_error, url)

        break

    raise DownloaderException(error_string)
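A hedged sketch of scoping the proxy variables to the wget invocation only, instead of exporting them process-wide with os.putenv as above. The helper name run_wget is illustrative.

import os
import subprocess

def run_wget(command, http_proxy=None, https_proxy=None):
    # copy the environment so the proxy settings do not leak past this call
    env = os.environ.copy()
    if http_proxy:
        env['http_proxy'] = http_proxy
    if https_proxy:
        env['https_proxy'] = https_proxy
    return subprocess.check_output(command, env=env)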
import os

# noinspection PyUnresolvedReferences
from typing import *

mydir = os.path.split(__file__)[0]
dll_dir = os.path.join(mydir, "oes_libs-0.15.7.4-release", "win64")
path = os.getenv("PATH")
os.putenv('PATH', f"{dll_dir};{path}")

# noinspection PyUnresolvedReferences
import vnoes
from vnoes import *


def MdsApiSample_ResubscribeByCodePrefix(channel, pCodeListString):
    SSE_CODE_PREFIXES = \
        "009, 01, 02, " + \
        "10, 11, 12, 13, 18, 19, " + \
        "20, " + \
        "5, " + \
        "6, " + \
        "#000"
    SZSE_CODE_PREFIXES = \
        "00, " + \
        "10, 11, 12, 13, " + \
        "15, 16, 17, 18, " + \
        "30, " + \
        "39"  # note: the original concatenated "30" and "39" without a separator, yielding the bogus prefix "3039"

    MdsApi_SubscribeByStringAndPrefixes(channel,
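On Python 3.8+ on Windows, extension modules no longer resolve their DLL dependencies through PATH; os.add_dll_directory is the supported mechanism. A hedged sketch using the dll_dir computed above, with a PATH fallback for older interpreters:

import os
import sys

if sys.platform == 'win32' and hasattr(os, 'add_dll_directory'):
    # register the DLL folder with the loader (Python 3.8+)
    os.add_dll_directory(dll_dir)
else:
    # older interpreters still consult PATH for DLL resolution
    os.environ['PATH'] = dll_dir + os.pathsep + os.environ.get('PATH', '')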
def main():
    option_parser = optparse.OptionParser()
    option_parser.add_option(
        '',
        '--force',
        help="Push DEPS update to server without prompting",
        action="store_true",
        dest="force")
    options, args = option_parser.parse_args()

    src_dir = "/usr/local/google/home/%s/dartium_deps_updater/dartium.deps" % os.environ[
        "USER"]
    os.putenv("GIT_PAGER", "")

    if not os.path.exists(src_dir):
        print "Error: prior to running this script, you need to check out a Dartium source tree at"
        print "  %s" % src_dir
        print "Please reserve the above directory for this script and do not use it for other purposes."
        sys.exit(1)

    os.chdir(src_dir)

    # parse DEPS
    deps = run_cmd([
        'svn', 'cat',
        'https://dart.googlecode.com/svn/branches/bleeding_edge/deps/dartium.deps/DEPS'
    ])
    rev_num = {}
    for repo in REPOSITORIES:
        revision = 'dartium_%s_revision":\s*"(.+)"' % repo
        rev_num[repo] = re.search(revision, deps).group(1)

    # update repos
    all_revs = []
    for repo, (svn_url, _) in REPOSITORY_INFO.items():
        output = run_cmd(
            ["svn", "log", "-r", "HEAD:%s" % rev_num[repo], svn_url])
        revs = parse_svn_log(output, repo)
        if revs and revs[-1]['rev'] == rev_num[repo]:
            revs.pop()
        all_revs.append(revs)

    pending_updates = merge_revs(all_revs)
    pending_updates.reverse()

    print
    print "Current DEPS revisions:"
    for repo in REPOSITORIES:
        print '  dartium_%s_revision=%s' % (repo, rev_num[repo])

    if len(pending_updates) == 0:
        print "DEPS is up-to-date."
        sys.exit(0)
    else:
        print "Pending DEPS updates:"
        for s in pending_updates:
            print "  %s to %s (%s) %s" % (s['repo'], s['rev'], s['isotime'],
                                          s['info'])

    # make the next DEPS update
    os.chdir(src_dir)
    run_cmd(['rm', 'DEPS'])
    print run_cmd(['svn', 'update'])

    s = pending_updates[0]

    pattern = re.compile('dartium_' + s['repo'] + '_revision":\s*"(.+)"')
    new_deps = pattern.sub(
        'dartium_' + s['repo'] + '_revision": "' + s['rev'] + '"', deps)
    write_file('DEPS', new_deps)

    commit_log = 'DEPS AutoUpdate: %s to %s (%s) %s\n' % (
        s['repo'], s['rev'], s['isotime'], s['author'])
    commit_log += s['info'] + '\n' + commit_url(s['repo'], s['rev'])
    write_file('commit_log.txt', commit_log)

    print run_cmd(['svn', 'diff'])
    print
    print "Commit log:"
    print "---------------------------------------------"
    print commit_log
    print "---------------------------------------------"

    if not options.force:
        print "Ready to push; press Enter to continue or Control-C to abort..."
        sys.stdin.readline()

    print run_cmd(['svn', 'commit', '--file', 'commit_log.txt'])
    print "Done."
from os import putenv  # needed for the putenv() call below

from info import Info_box
import lib.errors
from lib.io import io_streams_log, io_streams_tee
import lib.warnings
from multi import Application_callback, load_multiprocessor
from prompt import interpreter
from status import Status
status = Status()
import user_functions
import version

# Set up the user functions.
user_functions.initialise()

# Modify the environmental variables.
putenv('PDBVIEWER', 'vmd')


def start(mode=None, profile_flag=False):
    """Execute relax.

    @keyword mode:          Force a relax mode, overriding the command line.
    @type mode:             str
    @keyword profile_flag:  Change this flag to True for code profiling.
    @type profile_flag:     bool
    """

    # Normal relax operation.
    relax = Relax()

    # Override normal operation.
def download(self, url, error_message, timeout, tries, prefer_cached=False):
    """
    Downloads a URL and returns the contents

    :param url:
        The URL to download

    :param error_message:
        A string to include in the console error that is printed
        when an error occurs

    :param timeout:
        The int number of seconds to set the timeout to

    :param tries:
        The int number of times to try and download the URL in the case of
        a timeout or HTTP 503 error

    :param prefer_cached:
        If a cached version should be returned instead of trying a new request

    :raises:
        NoCaCertException: when no CA certs can be found for the url
        RateLimitException: when a rate limit is hit
        DownloaderException: when any other download error occurs

    :return:
        The string contents of the URL
    """

    if prefer_cached:
        cached = self.retrieve_cached(url)
        if cached:
            return cached

    self.tmp_file = tempfile.NamedTemporaryFile().name
    command = [
        self.curl,
        '--user-agent',
        self.settings.get('user_agent'),
        '--connect-timeout',
        str(int(timeout)),
        '-sSL',
        # Don't be alarmed if the response from the server does not select
        # one of these since the server runs a relatively new version of
        # OpenSSL which supports compression on the SSL layer, and Apache
        # will use that instead of HTTP-level encoding.
        '--compressed',
        # We have to capture the headers to check for rate limit info
        '--dump-header',
        self.tmp_file
    ]

    request_headers = self.add_conditional_headers(url, {})

    for name, value in request_headers.items():
        command.extend(['--header', "%s: %s" % (name, value)])

    secure_url_match = re.match('^https://([^/]+)', url)
    if secure_url_match != None:
        secure_domain = secure_url_match.group(1)
        bundle_path = self.check_certs(secure_domain, timeout)
        command.extend(['--cacert', bundle_path])

    debug = self.settings.get('debug')
    # We always trigger debug output so that we can detect certain errors
    command.append('-v')

    http_proxy = self.settings.get('http_proxy')
    https_proxy = self.settings.get('https_proxy')
    proxy_username = self.settings.get('proxy_username')
    proxy_password = self.settings.get('proxy_password')

    if debug:
        console_write(u"Curl Debug Proxy", True)
        console_write(u"  http_proxy: %s" % http_proxy)
        console_write(u"  https_proxy: %s" % https_proxy)
        console_write(u"  proxy_username: %s" % proxy_username)
        console_write(u"  proxy_password: %s" % proxy_password)

    if http_proxy or https_proxy:
        command.append('--proxy-anyauth')

    if proxy_username or proxy_password:
        command.extend(['-U', u"%s:%s" % (proxy_username, proxy_password)])

    if http_proxy:
        os.putenv('http_proxy', http_proxy)
    if https_proxy:
        os.putenv('HTTPS_PROXY', https_proxy)

    command.append(url)

    error_string = None
    while tries > 0:
        tries -= 1
        try:
            output = self.execute(command)

            with open_compat(self.tmp_file, 'r') as f:
                headers_str = read_compat(f)
            self.clean_tmp_file()

            message = 'OK'
            status = 200
            headers = {}
            for header in headers_str.splitlines():
                if header[0:5] == 'HTTP/':
                    message = re.sub('^HTTP/\d\.\d\s+\d+\s*', '', header)
                    status = int(
                        re.sub('^HTTP/\d\.\d\s+(\d+)(\s+.*)?$', '\\1',
                               header))
                    continue
                if header.strip() == '':
                    continue
                name, value = header.split(':', 1)
                headers[name.lower()] = value.strip()

            error, debug_sections = self.split_debug(
                self.stderr.decode('utf-8'))
            if debug:
                self.print_debug(debug_sections)

            self.handle_rate_limit(headers, url)

            if status not in [200, 304]:
                e = NonCleanExitError(22)
                e.stderr = "%s %s" % (status, message)
                raise e

            output = self.cache_result('get', url, status, headers, output)

            return output

        except (NonCleanExitError) as e:
            if hasattr(e.stderr, 'decode'):
                e.stderr = e.stderr.decode('utf-8', 'replace')

            # Stderr is used for both the error message and the debug info
            # so we need to process it to extract the debug info
            e.stderr, debug_sections = self.split_debug(e.stderr)
            if debug:
                self.print_debug(debug_sections)

            self.clean_tmp_file()

            if e.returncode == 22:
                code = re.sub('^.*?(\d+)([\w\s]+)?$', '\\1', e.stderr)
                if code == '503' and tries != 0:
                    # GitHub and BitBucket seem to rate limit via 503
                    error_string = u'Downloading %s was rate limited' % url
                    if tries:
                        error_string += ', trying again'
                        if debug:
                            console_write(error_string, True)
                    continue

                download_error = u'HTTP error ' + code

            elif e.returncode == 7:
                # If the user could not connect, check for ipv6 errors and
                # if so, force curl to use ipv4. Apparently some users have
                # network configuration where curl will try ipv6 and resolve
                # it, but their ISP won't actually route it.
                full_debug = u"\n".join(
                    [section['contents'] for section in debug_sections])
                ipv6_error = re.search(
                    '^\s*connect to ([0-9a-f]+(:+[0-9a-f]+)+) port \d+ failed: Network is unreachable',
                    full_debug, re.I | re.M)
                if ipv6_error and tries != 0:
                    if debug:
                        error_string = u'Downloading %s failed because the ipv6 address %s was not reachable, retrying using ipv4' % (
                            url, ipv6_error.group(1))
                        console_write(error_string, True)
                    command.insert(1, '-4')
                    continue
                else:
                    download_error = e.stderr.rstrip()

            elif e.returncode == 6:
                download_error = u'URL error host not found'

            elif e.returncode == 28:
                # GitHub and BitBucket seem to time out a lot
                error_string = u'Downloading %s timed out' % url
                if tries:
                    error_string += ', trying again'
                    if debug:
                        console_write(error_string, True)
                continue

            else:
                download_error = e.stderr.rstrip()

            error_string = u'%s %s downloading %s.' % (error_message,
                                                       download_error, url)

        break

    raise DownloaderException(error_string)
                # You can also use negative selection, like *=YES followed by Foo.Bar=NO.
                d0 = dict[0].split('.', 1)
                if (dict[1].upper() == 'YES' or dict[1].upper() == 'Y'):
                    if (len(d0) > 1):
                        filter = filter + dict[0] + ':'
                    else:
                        filter = filter + dict[0] + '.*' + ':'
                elif (dict[1].upper() == 'NO' or dict[1].upper() == 'N'):
                    if (len(d0) > 1):
                        negfilter = negfilter + dict[0] + ':'
                    else:
                        negfilter = negfilter + dict[0] + '.*' + ':'
            elif part == 'Environment':
                os.putenv(dict[0], dict[1])
                print '\t%s="%s"' % (dict[0], dict[1])
            elif part == 'GTestFile' and line != '':
                # the file name might contain the = character
                # (the original had a second, unreachable GTestFile branch; merged here)
                gtestfile = line
            else:
                # line is unusable
                continue
    except IOError:
        print >> sys.stderr, 'error opening config file "%s"' % conffile
def try_command(cmd, cwd=None, console=False, timeout=None, env=None):
    args = top.args
    console = (console or args.verbose or args.debug) and not args.quiet
    filelog = args.debug
    output = ''
    proc = None
    timer = None
    if env is not None:
        for k, v in env.items():
            os.putenv(k, v)
    try:
        if args.debug:
            print("Running %s" % " ".join(cmd))

        proc = subprocess.Popen(cmd,
                                cwd=cwd,
                                preexec_fn=os.setsid,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                universal_newlines=True)

        if timeout:
            timed_out = [False]
            timer = Timer(timeout, timeout_killer, [proc, timed_out])
            timer.start()

        if console:
            while True:
                line = proc.stdout.readline()
                if line:
                    output += line
                    print(line, end='')
                elif proc.poll() is not None:
                    break
            proc.wait()  # the original `proc.wait` was missing the call parentheses
        else:
            output = proc.communicate()[0]

        if timeout:
            timer.cancel()

        rc = proc.returncode
        proc = None
        if timeout and timed_out[0]:
            return output + ("\n%s timed out." % cmd[0])
        elif rc == -signal.SIGSEGV:
            raise Exception("segmentation fault")
        elif rc and args.verifier != 'symbooglix':
            raise Exception(output)
        else:
            return output

    except (RuntimeError, OSError) as err:
        print(output, file=sys.stderr)
        sys.exit("Error invoking command:\n%s\n%s" % (" ".join(cmd), err))

    finally:
        if timeout and timer:
            timer.cancel()
        if proc:
            os.killpg(os.getpgid(proc.pid), signal.SIGKILL)
        if filelog:
            with open(temporary_file(cmd[0], '.log', args), 'w') as f:
                f.write(output)
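A hedged sketch of the env handling above: rather than writing each pair into the global environment with os.putenv (which outlives this call), merge it into a copy of os.environ and hand that to Popen. The wrapper name popen_with_env is illustrative.

import os
import subprocess

def popen_with_env(cmd, extra_env=None, **kwargs):
    env = os.environ.copy()
    if extra_env:
        env.update(extra_env)  # overrides apply only to this child process
    return subprocess.Popen(cmd, env=env, **kwargs)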
import datetime  # required by the utcnow()/now() calls below
import oci
import gzip
import os
import csv
import cx_Oracle
import time
import pytz
import logging

filename = 'adbs2adw_' + str(datetime.datetime.utcnow())
logging.basicConfig(level=logging.DEBUG,
                    filename=filename,
                    filemode="a+",
                    format="%(asctime)-15s %(levelname)-8s %(message)s")

# cx_Oracle locates the ADW wallet through the TNS_ADMIN variable
os.putenv("TNS_ADMIN", "/home/opc/wallet/Wallet_ADWshared")

naive = datetime.datetime.now()
timezone = pytz.timezone("Europe/Berlin")
aware1 = naive.astimezone(timezone)
current_time = str(aware1.strftime("%Y-%m-%d %H:%M:%S"))


##########################################################################
# Print header centered
##########################################################################
def print_header(name, category):
    options = {0: 90, 1: 60, 2: 30}
    chars = int(options[category])
    print("")
    print('#' * chars)
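A hedged sketch of the same setting done through os.environ before the first cx_Oracle.connect call, so the value is visible both to this process and to any children. The user, password and DSN below are placeholders, not values from the original.

import os
import cx_Oracle

os.environ['TNS_ADMIN'] = '/home/opc/wallet/Wallet_ADWshared'
# hypothetical credentials and service name, for illustration only
connection = cx_Oracle.connect('admin', 'example_password', 'adwshared_low')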
def main():
    option_parser = optparse.OptionParser()
    option_parser.add_option(
        '',
        '--target',
        help="Update one of [dartium|integration|multivm|clank]",
        action="store",
        dest="target",
        default="dartium")
    option_parser.add_option(
        '',
        '--force',
        help="Push DEPS update to server without prompting",
        action="store_true",
        dest="force")
    options, args = option_parser.parse_args()

    target = options.target
    if not target in TARGETS.keys():
        print "Error: invalid target"
        print "Choose one of " + str(TARGETS)
        sys.exit(1)  # added: the original fell through to a KeyError below
    (deps_dir, prefix, repos, branch) = TARGETS[target]
    deps_file = deps_dir + '/DEPS'

    src_dir = "/usr/local/google/home/%s/dartium_deps_updater/deps/%s" % (
        os.environ["USER"], target)
    os.putenv("GIT_PAGER", "")

    if not os.path.exists(src_dir):
        print run_cmd(['svn', 'co', deps_dir, src_dir])

    os.chdir(src_dir)

    # parse DEPS
    deps = run_cmd(['svn', 'cat', deps_file])
    rev_num = {}
    for repo in repos:
        revision = '%s_%s_revision":\s*"(.+)"' % (prefix, repo)
        rev_num[repo] = re.search(revision, deps).group(1)

    # update repos
    all_revs = []
    for repo in repos:
        (svn_url, _) = REPOSITORY_INFO[repo]
        output = run_cmd(
            ["svn", "log", "-r", "HEAD:%s" % rev_num[repo], svn_url % branch])
        revs = parse_svn_log(output, repo)
        if revs and revs[-1]['rev'] == rev_num[repo]:
            revs.pop()
        all_revs.append(revs)

    pending_updates = merge_revs(all_revs)
    pending_updates.reverse()

    print
    print "Current DEPS revisions:"
    for repo in repos:
        print '  %s_%s_revision=%s' % (prefix, repo, rev_num[repo])

    if len(pending_updates) == 0:
        print "DEPS is up-to-date."
        sys.exit(0)
    else:
        print "Pending DEPS updates:"
        for s in pending_updates:
            print "  %s to %s (%s) %s" % (s['repo'], s['rev'], s['isotime'],
                                          s['info'])

    # make the next DEPS update
    os.chdir(src_dir)
    run_cmd(['rm', 'DEPS'])
    print run_cmd(['svn', 'update'])

    s = pending_updates[0]

    pattern = re.compile(prefix + '_' + s['repo'] + '_revision":\s*"(.+)"')
    new_deps = pattern.sub(
        prefix + '_' + s['repo'] + '_revision": "' + s['rev'] + '"', deps)
    write_file('DEPS', new_deps)

    commit_log = 'DEPS AutoUpdate: %s to %s (%s) %s\n' % (
        s['repo'], s['rev'], s['isotime'], s['author'])
    commit_log += s['info'] + '\n' + commit_url(s['repo'], s['rev'])
    write_file('commit_log.txt', commit_log)

    print run_cmd(['svn', 'diff'])
    print
    print "Commit log:"
    print "---------------------------------------------"
    print commit_log
    print "---------------------------------------------"

    if not options.force:
        print "Ready to push; press Enter to continue or Control-C to abort..."
        sys.stdin.readline()

    print run_cmd(['svn', 'commit', '--file', 'commit_log.txt'])
    print "Done."
def deploy_linux(qt_path, source_path, build_path, target_path, extra_libs,
                 disable_tools_download):
    tools_path = None

    if not disable_tools_download:
        tools_path = os.path.join(
            os.path.abspath(os.path.dirname(sys.argv[0])), 'appimage-tools')

        if not os.path.isdir(tools_path):
            os.mkdir(tools_path)

    appdir_deploy_command = get_executable(
        'linuxdeploy-x86_64.AppImage',
        'https://bintray.com/qtproject/linuxdeploy-mirror/download_file?file_path=2020-06-03%2Flinuxdeploy-x86_64.AppImage',
        tools_path)
    appimage_tool_command = get_executable(
        'appimagetool-x86_64.AppImage',
        'https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage',
        tools_path)
    get_executable(
        'linuxdeploy-plugin-qt-x86_64.AppImage',
        'https://bintray.com/qtproject/linuxdeploy-mirror/download_file?file_path=2020-06-03%2Flinuxdeploy-plugin-qt-x86_64.AppImage',
        tools_path)

    appimage_path = os.path.join(target_path, 'otter-browser')

    make_path(appimage_path, ['usr', 'share', 'applications'])
    make_path(appimage_path, ['usr', 'share', 'icons', 'hicolor'])
    make_path(appimage_path, ['usr', 'share', 'otter-browser'])
    shutil.copy(
        os.path.join(source_path, 'otter-browser.desktop'),
        os.path.join(appimage_path,
                     'usr/share/applications/otter-browser.desktop'))

    icons_path = os.path.join(appimage_path, 'usr/share/icons/hicolor')
    icons = [16, 32, 48, 64, 128, 256, 'scalable']

    for size in icons:
        is_raster = isinstance(size, int)
        icon_directory = '{}x{}'.format(size, size) if is_raster else size

        make_path(icons_path, [icon_directory, 'apps'])
        shutil.copy(
            os.path.join(
                source_path, 'resources/icons',
                'otter-browser-{}.png'.format(size)
                if is_raster else 'otter-browser.svg'),
            os.path.join(
                icons_path, icon_directory, 'apps',
                'otter-browser.png' if is_raster else 'otter-browser.svg'))

    deploy_locale(source_path,
                  os.path.join(appimage_path, 'usr/share/otter-browser'))

    # default to '' so an unset LD_LIBRARY_PATH does not become the string 'None'
    os.putenv(
        'LD_LIBRARY_PATH', '{}:{}'.format(
            os.path.join(qt_path, 'lib'), os.getenv('LD_LIBRARY_PATH', '')))
    os.putenv('QMAKE', os.path.join(qt_path, 'bin/qmake'))

    run_command([
        appdir_deploy_command, '--plugin=qt',
        '--executable={}'.format(os.path.join(build_path, 'otter-browser')),
        '--appdir={}'.format(appimage_path)
    ])

    shutil.rmtree(os.path.join(appimage_path, 'usr/share/doc/'),
                  ignore_errors=True)

    libs_path = os.path.join(appimage_path, 'usr/lib')
    redundant_libs = [
        'libgst*-1.0.*', 'libFLAC.*', 'libogg.*', 'libvorbis*.*',
        'libmount.*', 'libpulse*.*', 'libsystemd.*', 'libxml2.*'
    ]

    for pattern in redundant_libs:
        for file in glob.glob(os.path.join(libs_path, pattern)):
            os.unlink(file)

    for file in glob.glob(os.path.join(libs_path, 'libicu*.*')):
        if not os.path.exists(
                os.path.join(qt_path, 'lib', os.path.basename(file))):
            os.unlink(file)

    run_command([
        appimage_tool_command, appimage_path,
        os.path.join(target_path, 'otter-browser-x86_64.AppImage')
    ])

    shutil.rmtree(appimage_path)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2020/7/30 7:37
# @File : p5_cockie_webdriver.py
from selenium import webdriver
import time
import os

# print(os.environ['PATH'])
# os.environ['PATH'] = os.environ['PATH'] + ";D:\\Program Files\\Mozilla Firefox\\"
# print(os.environ['PATH'])
os.putenv('PATH', 'D:\\Program Files\\Mozilla Firefox\\')
# Calls to putenv() do not update os.environ, so it is better to assign
# variables through os.environ instead.
# print(os.getenv('PATH'))

try:
    # Requires the Firefox driver (geckodriver); its version must match the
    # installed browser. Download from:
    # https://github.com/mozilla/geckodriver/releases
    brower = webdriver.Firefox()
    brower.get("https://www.douban.com/")
    time.sleep(1)
    brower.switch_to.frame(brower.find_elements_by_tag_name('iframe')[0])
    btm1 = brower.find_element_by_xpath('//ul[@class="tab-start"]/li[2]')
    btm1.click()
    brower.find_element_by_xpath('//*[@id="username"]').send_keys(
        '*****@*****.**')
    brower.find_element_by_id('password').send_keys('DBXXxk19990@')
    time.sleep(1)
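Following the note in the snippet above, a hedged sketch that extends PATH through os.environ instead, so the change is visible both to os.getenv() and to the driver child process. The directory is the one used in the snippet.

import os

firefox_dir = 'D:\\Program Files\\Mozilla Firefox\\'
# os.environ assignment updates both os.environ and the process environment
os.environ['PATH'] = os.environ.get('PATH', '') + os.pathsep + firefox_dir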