def main():
    """Render a Markdown report of each virtualenv's Python version and packages.

    Iterates over $WORKON_HOME; for every virtualenv directory it inspects
    bin/: the `python` binary yields the interpreter version, the `pip`
    binary yields the installed packages.  The combined data is appended
    (via the module-level `template`) to ``virtualenvs-<host>.md``.
    """
    workon_home = Path(os.environ.get('WORKON_HOME'))
    for virtualenv in workon_home.iterdir():
        if not virtualenv.is_dir():
            continue
        # Collect both pieces of data before rendering.  The original code
        # rendered inside the `pip` branch and referenced variables that were
        # only assigned in the `python` branch, raising NameError whenever
        # bin/ iteration yielded `pip` before `python`.
        virtual_environment = virtualenv.name
        python_version = None
        packages = []
        for python_bin in Path(f'{virtualenv}/bin/').iterdir():
            if python_bin.name == 'python':
                command = [f'{python_bin}', '-c',
                           "import sys;print(sys.version.split()[0]);"]
                stdout, _ = Popen(command, stdout=PIPE).communicate()
                python_version = stdout.decode('utf-8').strip()
            elif python_bin.name == 'pip':
                command = [f'{python_bin}', 'freeze']
                stdout, _ = Popen(command, stdout=PIPE).communicate()
                packages = [p.strip() for p in stdout.decode('utf-8').split()]
        if python_version is None:
            # No python binary found -- skip rather than render bogus data.
            continue
        hostname = os.uname()[1].split(".")[0]
        with open(f'virtualenvs-{hostname}.md', 'a') as f:
            f.write(template.render(virtualenv=virtual_environment,
                                    version=python_version,
                                    packages=packages))
def sys_info(request):
    """Django debug view returning an HTML summary of the deployment state.

    Shows the svnversion string, the project settings path, the current
    django Site row, raw `svn info` output and the interpreter's sys.path.
    """
    # %-style HTML template; keys are filled from the dict built at the return.
    template = u''' <h1>Revision</h1> %(svn_version)s <h1>settings path</h1> <pre>%(settingspath)s</pre> <h1>Site.objects.get_current()</h1> <table> <tr><th>id</th><td>%(site.pk)s</td></tr> <tr><th>domain</th><td>%(site.domain)s</td></tr> <tr><th>name</th><td>%(site.name)s</td></tr> </table> <h1>svn info</h1> <pre>%(svninfo)s</pre> <h1>sys.path</h1> <pre>%(syspath)s</pre> '''
    # Function-local imports keep this debug-only view cheap at module load.
    import sys
    import settings
    from django.contrib.sites.models import Site
    from subprocess import Popen, PIPE
    # Working-copy revision string of the project checkout.
    svn_version, svn_version_err = Popen(['svnversion', settings.PROJECT_PATH], stdout=PIPE).communicate()
    # NOTE(review): stderr is not piped, so the *_err values are always None
    # here; the `if` guards make that harmless -- confirm whether stderr=PIPE
    # was intended.
    svn_version = svn_version.decode('utf-8') if svn_version else u''
    svn_version_err = svn_version_err.decode('utf-8') if svn_version_err else u''
    site = Site.objects.get_current()
    svnout, svnerr = Popen(['svn', 'info','--non-interactive','--username=anonymous','--password=4guests@','-r', 'HEAD', settings.PROJECT_PATH], stdout=PIPE).communicate()
    svnout = svnout.decode('utf-8') if svnout else u''
    svnerr = svnerr.decode('utf-8') if svnerr else u''
    return HttpResponse(template % {'site.pk':site.pk, 'site.domain':site.domain, 'site.name':site.name, 'svn_version': svn_version + svn_version_err, 'settingspath': settings.PROJECT_PATH, 'syspath':'\n'.join(sys.path), 'svninfo':svnout + svnerr})
def main():
    """Restart the user-session pulseaudio unit, printing its systemd status
    before and after the restart."""
    unit = 'pulseaudio.service'

    def unit_status():
        # `systemctl --user status` output, decoded for printing.
        raw = Popen(['systemctl', '--user', 'status', unit],
                    stdout=subprocess.PIPE).communicate()[0]
        return raw.decode("UTF-8")

    print(unit_status())
    Popen(['systemctl', '--user', 'restart', unit]).communicate()
    print(unit_status())
def createoconv(scene, frame, sim_op, simnode, **kwargs):
    """Build a Radiance octree for one frame by piping the frame's rad-file
    text into `oconv`, writing the binary octree to <filebase>-<frame>.oct.

    Returns 'CANCELLED' (after reporting through sim_op) when oconv emits a
    fatal error, otherwise returns None.
    """
    fbase = "{0}-{1}".format(scene['viparams']['filebase'], frame)
    with open("{}.oct".format(fbase), "wb") as octfile:
        # oconv reads the scene description from stdin ("-"); stdout is the
        # octree itself, stderr is captured for error detection.
        err = Popen("oconv -w -".split(), stdin = PIPE, stderr = PIPE, stdout = octfile).communicate(input = simnode['radfiles'][str(frame)].encode(sys.getfilesystemencoding()))[1]
        if err and 'fatal -' in err.decode():
            sim_op.report({'ERROR'}, 'Oconv conversion failure: {}'.format(err.decode()))
            return 'CANCELLED'
def test_rosnode(self): topics = ['/chatter', '/foo/chatter', '/bar/chatter'] # wait for network to initialize rospy.init_node('test') nodes = ['/talker', '/foo/talker', '/bar/talker', rospy.get_caller_id()] for i, t in enumerate(topics): rospy.Subscriber(t, std_msgs.msg.String, self.callback, i) all = set(range(0, len(topics))) timeout_t = time.time() + 10. while time.time() < timeout_t and self.vals != all: time.sleep(0.1) self.assertEquals(self.vals, all, "failed to initialize graph correctly") # network is initialized cmd = 'rosnode' # list # - we aren't matching against the core services as those can make the test suites brittle output = Popen([cmd, 'list'], stdout=PIPE).communicate()[0] output = output.decode() l = set(output.split()) for t in nodes: self.assert_(t in l, "%s not in %s"%(t, l)) output = Popen([cmd, 'list', '-a'], stdout=PIPE).communicate()[0] output = output.decode() l = set(output.split()) for t in nodes: for e in l: if t in e: break else: self.fail("did not find [%s] in list [%s]"%(t, l)) output = Popen([cmd, 'list', '-u'], stdout=PIPE).communicate()[0] output = output.decode() l = set(output.split()) self.assert_(len(l), "list -u is empty") for e in l: self.assert_(e.startswith('http://')) for name in nodes: # type output = Popen([cmd, 'info', name], stdout=PIPE).communicate()[0] output = output.decode() # not really validating output as much as making sure it's not broken self.assert_(name in output) self.assert_('chatter' in output) self.assert_('Publications' in output) self.assert_('Subscriptions' in output) if 0: #ping stdout, stderr = run_for([cmd, 'ping', name], 3.)
def get_architectures(archi=None, basetgz=None):
    """
    Ensure that the architectures exist

    "all" keyword can be confusing about the targeted architectures. Consider
    using the "any" keyword to force the build on all architectures or let
    lgp find the value in debian/control by itself in doubt.

    lgp replaces "all" with "current" architecture value

    :param: archi: str or list name of a architecture
    :return: list of architecture
    """
    # Architectures dpkg knows about; baseline for the validation loop below.
    known_archi = Popen(["dpkg-architecture", "-L"], stdout=PIPE).communicate()[0]
    if sys.version_info >= (3, ):
        known_archi = known_archi.decode('utf-8')
    known_archi = known_archi.split()
    # try to guess targeted architectures
    if archi is None or len(archi) == 0:
        archi = guess_debian_architecture()
    # "all" means architecture-independent. so we can replace by "current"
    # architecture only
    if 'all' in archi:
        archi = ['current']
    if 'current' in archi:
        # Resolve 'current' to the host's dpkg architecture.
        archi = Popen(["dpkg", "--print-architecture"], stdout=PIPE).communicate()[0]
        if sys.version_info >= (3, ):
            archi = archi.decode('utf-8')
        archi = archi.split()
    else:
        if 'any' in archi:
            # 'any' expands to every architecture that has an archived chroot
            # image available under basetgz.
            if not osp.isdir(basetgz):
                raise SetupException("default location '%s' for the archived "
                                     "chroot images was not found" % basetgz)
            try:
                # Image files are named like <dist>-<arch>.tgz; extract <arch>.
                archi = [ osp.basename(f).split('-', 1)[1].split('.')[0]
                          for f in glob.glob(osp.join(basetgz, '*.tgz')) ]
            except IndexError:
                raise SetupException(
                    "there is no available chroot images in default location '%s'"
                    "\nPlease run 'lgp setup -c create'" % basetgz)
        # Keep only architectures dpkg actually knows about.
        archi = set(known_archi) & set(archi)
    for a in archi:
        if a not in known_archi:
            msg = "architecture '%s' not found in '%s' (create it or unreference it)"
            raise ArchitectureException(msg % (a, basetgz))
    return archi
def check_file_type(file: bytes) -> tuple:
    """Identify a byte blob via the `file -b --mime` command.

    Returns a 2-tuple (full mime string, short subtype with the trailing
    ';' stripped), or an empty tuple when the argument is not bytes.
    Only the first 1024 bytes are fed to `file`.
    """
    if not isinstance(file, bytes):
        return ()
    proc = Popen("/usr/bin/file -b --mime -", shell=True, stdout=PIPE, stdin=PIPE)
    b_file_type = proc.communicate(file[:1024])[0].strip()
    # e.g. b'text/plain; charset=us-ascii' -> 'plain;' -> 'plain'
    subtype = b_file_type.decode('ascii').split()[0].split("/")[1][:-1]
    return b_file_type.decode("utf-8"), subtype
def get_time(log_path):
    """Return the Maven 'Total time' value from the last build recorded in a
    log file, or '-' when the file is missing or contains no such line."""
    if not os.path.exists(log_path):
        return "-"
    cmd = ('grep -h "\[INFO\] Total time:" {} | tail -1 | cut -d \' \' -f5-'
           .format(log_path))
    out, err = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True).communicate()
    result = out.decode().strip()
    return result if result else '-'
def get_version():
    """ Returns project version as derived by git. """
    def _git(command):
        # Raw bytes from git with the trailing newline stripped.
        return Popen(command, stdout=PIPE, shell=True).stdout.read().rstrip()

    branch = _git('git rev-parse --abbrev-ref HEAD')
    rev = _git('git describe --always --tags --match "Release_*"')
    return "({branch}) {version}".format(branch=branch.decode('ascii'),
                                         version=rev.decode('ascii'))
def get_compilation_status(log_path):
    """Classify the last Maven BUILD line of a log file.

    Returns 'UNSTATED' when the log is missing, 'FAILURE' or 'SUCCESS' when
    the last '[INFO] BUILD' line says so, and '-' otherwise.
    """
    if not os.path.exists(log_path):
        return "UNSTATED"
    out, err = Popen('grep -h \"\[INFO\] BUILD\" {} | tail -1'.format(log_path),
                     stdout=PIPE, stderr=PIPE, shell=True).communicate()
    text = out.decode()
    if 'FAILURE' in text:
        return 'FAILURE'
    if 'SUCCESS' in text:
        return 'SUCCESS'
    return '-'
def test_auth_openshift():
    """End-to-end check of Kiali's 'openshift' auth strategy.

    Switches the Kiali configmap to the openshift strategy, obtains an OAuth
    token cookie from the authenticate endpoint, verifies an authenticated
    namespaces request, and always restores the 'login' strategy afterwards.
    """
    kiali_hostname = conftest.get_kiali_hostname()
    cookie_file = "./tmp_cookie_file"
    try:
        assert change_configmap_with_new_value(
            element_name='strategy:',
            list=STRATEGY_LIST,
            new_value=STRATEGY_OPENSHIFT,
            current_configmap_file=conftest.CURRENT_CONFIGMAP_FILE,
            new_configmap_file=conftest.NEW_CONFIG_MAP_FILE)
        # Create token cookie file
        cmd = "curl -v -k POST -c {} -d 'access_token='$(oc whoami -t)'&expires_in=86400&scope=user%3Afull&token_type=Bearer' https://{}/api/authenticate".format(cookie_file, kiali_hostname)
        # Poll until the response confirms authentication (contains 'username').
        with timeout(seconds=120, error_message='Timed out waiting getting token'):
            while True:
                stdout, stderr = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
                if 'username' in stdout.decode():
                    break
                time.sleep(2)
        # Make the API request using token cookie
        cmd = "curl -v -k -b {} https://{}/api/namespaces".format(cookie_file, kiali_hostname)
        # Poll until the namespaces payload includes istio-system.
        with timeout(seconds=120, error_message='Timed out waiting getting token'):
            while True:
                stdout, stderr = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
                if "istio-system" in stdout.decode():
                    break
                time.sleep(2)
        # Remove the temporary cookie file.
        cmd = "rm -f {}".format(cookie_file)
        Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
    finally:
        # Return Auth strategy back to 'login' and wait for Kiali to be accessible
        create_configmap_and_wait_for_kiali(conftest.CURRENT_CONFIGMAP_FILE)
        make_request(auth_type=AUTH_LOGIN)
def usearch_cluster(outputFolder):
    """Cluster dereplicated reads into OTU/zOTU sequences with the tool
    selected by args.cluster (usearch11 or vsearch variants), writing
    otu_sequences.fa plus cluster/mapping files into outputFolder.

    Every external tool invocation is recorded through admin_log.
    """
    #sort by size
    out, error = Popen(["vsearch", "--sortbysize", outputFolder+"/uniques.fa", "--output", outputFolder+"/uniques_sorted.fa","--minseqlength", "1","--minsize", args.abundance_minsize], stdout=PIPE, stderr=PIPE).communicate()
    admin_log(outputFolder, out=out.decode(), error=error.decode(), function="sortbysize")
    if args.cluster == "cluster_otus":
        # UPARSE OTU clustering (usearch11); includes built-in chimera removal.
        out, error = Popen(["usearch11", "-cluster_otus", outputFolder+"/uniques_sorted.fa", "-uparseout", outputFolder+"/cluster_file.txt", "-otus", outputFolder+"/otu_sequences.fa", "-relabel", "Otu", "-fulldp"], stdout=PIPE, stderr=PIPE).communicate()
        admin_log(outputFolder, out=out.decode(), error=error.decode(), function="cluster_otus")
    if args.cluster == "unoise":
        # usearch11 denoising into zOTUs, then relabel the headers as OtuN.
        out, error = Popen(["usearch11","-unoise3", outputFolder+"/uniques_sorted.fa", "-unoise_alpha", args.unoise_alpha, "-minsize", args.abundance_minsize, "-tabbedout", outputFolder+"/cluster_file.txt", "-zotus", outputFolder+"/zotususearch.fa"], stdout=PIPE, stderr=PIPE).communicate()
        admin_log(outputFolder, out=out.decode(), error=error.decode(), function="unoise")
        count = 1
        # Rewrite Zotu headers as sequentially numbered Otu headers.
        with open(outputFolder + "/zotususearch.fa") as handle, open(outputFolder + "/otu_sequences.fa", 'a') as newotu:
            for record in SeqIO.parse(handle, "fasta"):
                newotu.write(">Otu" + str(count) + "\n")
                newotu.write(str(record.seq) + "\n")
                count += 1
        Popen(["rm", outputFolder + "/zotususearch.fa"])
    if args.cluster == "vsearch":
        # vsearch with explicit de novo chimera removal before clustering.
        out, error = Popen(["vsearch", "--uchime_denovo", outputFolder+"/uniques_sorted.fa", "--sizein", "--fasta_width", "0", "--nonchimeras", outputFolder+"/non_chimera.fa"], stdout=PIPE, stderr=PIPE).communicate()
        admin_log(outputFolder, out=out.decode(), error=error.decode(), function="vsearch uchime_denovo")
        out, error = Popen(["vsearch", "--cluster_size", outputFolder+"/non_chimera.fa", "--id", args.clusterid, "--sizein", "--fasta_width", "0","--minseqlength", "1", "--relabel", "Otu", "--centroids", outputFolder+"/otu_sequences.fa"],
                           stdout=PIPE, stderr=PIPE).communicate()
        admin_log(outputFolder, out=out.decode(), error=error.decode(), function="vsearch cluster")
        call(["rm", outputFolder + "/non_chimera.fa"])
    if args.cluster == "vsearch_no_chimera_check":
        # vsearch clustering directly on the sorted uniques, no chimera step.
        out, error = Popen(["vsearch", "--cluster_size", outputFolder+"/uniques_sorted.fa", "--id", args.clusterid, "--sizein", "--fasta_width", "0","--minseqlength", "1", "--relabel", "Otu", "--centroids", outputFolder+"/otu_sequences.fa"], stdout=PIPE, stderr=PIPE).communicate()
        admin_log(outputFolder, out=out.decode(), error=error.decode(), function="vsearch cluster")
    if args.cluster == "vsearch_unoise":
        # vsearch denoising, then uchime3 chimera removal, then renumbering.
        out, error = Popen(["vsearch", "--cluster_unoise", outputFolder+"/uniques_sorted.fa", "--unoise_alpha", args.unoise_alpha,"--minsize", args.abundance_minsize,"--minseqlength", "1", "--centroids", outputFolder+"/zotusvsearch.fa"], stdout=PIPE, stderr=PIPE).communicate()
        admin_log(outputFolder, out=out.decode(), error=error.decode(), function="vsearch unoise")
        out, error = Popen(["vsearch", "--uchime3_denovo", outputFolder+"/zotusvsearch.fa","--fasta_width", "0", "--nonchimeras", outputFolder + "/otu_sequences_nochime.fa"], stdout=PIPE, stderr=PIPE).communicate()
        admin_log(outputFolder, out=out.decode(), error=error.decode(), function="vsearch uchime_denovo3")
        count = 1
        # Rewrite denoised headers as sequentially numbered Otu headers.
        with open(outputFolder + "/otu_sequences_nochime.fa") as handle, open(outputFolder + "/otu_sequences.fa", 'a') as newotu:
            for record in SeqIO.parse(handle, "fasta"):
                newotu.write(">Otu" + str(count) + "\n")
                newotu.write(str(record.seq) + "\n")
                count += 1
        Popen(["rm", outputFolder + "/otu_sequences_nochime.fa"])
    if args.cluster == "vsearch_unoise_no_chimera_check":
        out, error = Popen(["vsearch", "--cluster_unoise", outputFolder+"/uniques_sorted.fa", "--unoise_alpha", args.unoise_alpha,"--minsize", args.abundance_minsize, "--minseqlength", "1", "--centroids", outputFolder+"/zotusvsearch.fa"], stdout=PIPE, stderr=PIPE).communicate()
        admin_log(outputFolder, out=out.decode(),
                  error=error.decode(), function="vsearch unoise")
        count = 1
        # Rewrite denoised headers as sequentially numbered Otu headers.
        with open(outputFolder+"/zotusvsearch.fa") as handle, open(outputFolder + "/otu_sequences.fa", 'a') as newotu:
            for record in SeqIO.parse(handle, "fasta"):
                newotu.write(">Otu" + str(count) + "\n")
                newotu.write(str(record.seq) + "\n")
                count += 1
        Popen(["rm", outputFolder+"/zotusvsearch.fa"])
def ip_url():
    """Return the best gateway IP to use: the IPv6 default gateway when it is
    pingable (and not the link-local fe80::1), otherwise the IPv4 gateway.
    """
    ip4_gw = '192.168.1.1'
    ip = ip4_gw
    #hopefully we will be pleasantly surprised and find that we upgrade to IPv6 like its 2001
    ip6_gw_query = 'ip -6 r|grep "/"|head -n1|awk \'{print $1}\'|sed \'s/\/.*/1/\'|tr -d "\n"'
    # NOTE(review): ip4_gw_query is built but never executed -- the IPv4
    # fallback is the hard-coded ip4_gw above.  Confirm intent.
    ip4_gw_query = 'ip r|head -n1|awk \'{print $3}\''
    ip6_gw = Popen(ip6_gw_query, shell=True, stdout=PIPE).stdout.read()
    ip6_gw_ping = "ping6 -q -c1 -w1 {} 1>/dev/null 2>/dev/null && echo 0 || echo 1".format(ip6_gw.decode("utf-8").rstrip())
    ip6_gw_ping = Popen(ip6_gw_ping, shell=True, stdout=PIPE).stdout.read()
    # The ping helper echoes '0' on success and '1' on failure.  The original
    # compared the decoded *string* against the integer 1, which is always
    # unequal, so the ping result was effectively ignored.
    ping_failed = ip6_gw_ping.decode("utf-8").rstrip() == '1'
    if len(ip6_gw) >= 3 and not ping_failed and b"fe80::1" not in ip6_gw:
        ip = ip6_gw.decode("utf-8").rstrip()
    else:
        if ping_failed and b"fe80::1" not in ip6_gw:
            print('ERR: {}'.format(ip6_gw_ping.decode("utf-8").rstrip()))
        print('Using IPv4 gw: {} (maybe you should install nftables and enable IPv6?)'.format(ip))
    return ip
def db_flyway(self, module=None, mode=None):
    """Run Flyway migrations for one product module against its MySQL DB.

    Builds the jdbc URL and filesystem migration path from deployment
    metadata, runs `flyway migrate` (with a concurrent `flyway validate` in
    a background thread) and, on migrate error, attempts `flyway repair`.
    Returns True on success, False otherwise.

    NOTE(review): the '-user='/'-password=' arguments below appear to have
    been redacted to '******' in this source; as written these lines are not
    valid Python and must be restored before this method can run.
    """
    storage = Config.get_config(type='env', name='storage')['value']
    # Normalize the storage root to a trailing slash.
    if storage[-1:] != '/':
        storage += '/'
    #storage = /data/nfs/
    try:
        m = Prod_Module.objects.get(id=int(module))
    except Exception as e:
        raise DeployError('042')
        return False  # NOTE(review): unreachable after the raise above.
    product = Product.objects.filter(id=self.pid)
    db = Dep_Sql.objects.get(prod=product)
    jdbc = 'jdbc:mysql://' + db.db_host + ':' + db.db_port + '/' + db.db_name
    # filesystem:<storage>/<prod>/<prod>-<branch>/<prod>-<version>/<module>/db
    db_path = 'filesystem:' + storage + self.prod + '/' + self.prod + '-' + self.branch + '/' + self.prod + '-' + self.version + '/' + m.name + '/db'
    shell = [ 'sudo', conf.FLYWAYPATH + 'flyway', 'migrate', '-url=' + jdbc, '-user='******'-password='******'-locations=' + db_path, '-baselineOnMigrate=true' ]
    output, err = Popen(shell, stdin=PIPE, stdout=PIPE, stderr=PIPE).communicate()
    # Kick off a validate pass in parallel; result handled by the callback.
    validate = [ 'sudo', conf.VFLYWAYPATH + 'flyway', 'validate', '-url=' + jdbc, '-user='******'-password='******'-locations=' + db_path ]
    t = threading.Thread(target=self.db_flyway_validate, args=(validate, ))
    t.start()
    if len(err) > 0:
        LOG('deploy').error('执行SQL出错:' + str(err.decode('utf-8')), self.dep_status_id)
        # Migration failed: attempt a repair and log its outcome.
        shell = [ 'sudo', conf.FLYWAYPATH + 'flyway', 'repair', '-url=' + jdbc, '-user='******'-password='******'-locations=' + db_path ]
        output2, err2 = Popen(shell, stdin=PIPE, stdout=PIPE, stderr=PIPE).communicate()
        if len(err2) > 0:
            LOG('deploy').error('执行恢复出错:' + str(err2.decode('utf-8')), self.dep_status_id)
        else:
            LOG('deploy').info('执行恢复成功:' + str(output2.decode('utf-8')), self.dep_status_id)
        return False
    else:
        LOG('deploy').info('执行SQL成功: ' + str(output.decode('utf-8')), self.dep_status_id)
        return True
def lookfor(files, pattern, interval):
    """ Look for a pattern in given files within interval """
    message = ''
    timestamps = within(interval)
    for logfile in files.strip().split(","):
        abspath = os.path.abspath(logfile)
        message += ("### Looking for %s log in %s within "
                    "the last %d minutes ###\n" % (pattern, abspath, interval))
        for timestamp in timestamps:
            # grep for lines carrying this timestamp AND the wanted pattern.
            needle = timestamp + '.*' + pattern
            stdout, stderr = Popen(['grep', needle, logfile],
                                   stdout=PIPE).communicate()
            gotcha = stdout.decode("utf-8")
            if not gotcha:
                print("### Can't find any %s log at %s in %s ###"
                      % (pattern, timestamp, logfile))
            else:
                print("##### Found matching %s log at %s in %s #####"
                      % (pattern, timestamp, logfile))
                message = message + gotcha + "\n"
    return message
def test_contact(self): """ Make sure optional contact details can be set """ # add a logging handler that captures the info log output log_output = StringIO() debug_handler = logging.StreamHandler(log_output) acme_tiny.LOGGER.addHandler(debug_handler) # call acme_tiny with new contact details old_stdout = sys.stdout sys.stdout = StringIO() result = acme_tiny.main([ "--account-key", KEYS['account_key'].name, "--csr", KEYS['domain_csr'].name, "--acme-dir", self.tempdir, "--directory-url", self.DIR_URL, "--contact", "mailto:[email protected]", "mailto:[email protected]", ]) sys.stdout.seek(0) crt = sys.stdout.read().encode("utf8") sys.stdout = old_stdout log_output.seek(0) log_string = log_output.read().encode("utf8") # make sure the certificate was issued and the contact details were updated out, err = Popen(["openssl", "x509", "-text", "-noout"], stdin=PIPE, stdout=PIPE, stderr=PIPE).communicate(crt) self.assertIn("Issuer: CN=Fake LE Intermediate", out.decode("utf8")) self.assertIn("Updated contact details:\nmailto:[email protected]\nmailto:[email protected]", log_string.decode("utf8")) # remove logging capture acme_tiny.LOGGER.removeHandler(debug_handler)
def get_file_mimetype(self, file_path: str, file_ext: str = "") -> str:
    """ return the mimetype of the file. see python module mimetype """
    assert file_ext == "" or file_ext.startswith(
        "."), 'File extension must starts with ".""'
    # INFO - B.L - 2018/10/11 - If user force the file extension we do.
    first_path = file_path + file_ext if file_ext else file_path
    # First attempt: guess from the (possibly forced) file name alone.
    str_, encoding = mimetypes_storage.guess_type(first_path, strict=False)
    if not str_ or str_ == "application/octet-stream":
        # Fallback: sniff the actual content with libmagic.
        mime = magic.Magic(mime=True)
        str_ = mime.from_file(file_path)
        if str_ and (str_ in AMBIGUOUS_MIMES):
            # libmagic's answer is known to be ambiguous for this type;
            # defer to the freedesktop `mimetype` tool for a better one.
            raw_mime = Popen(
                ["mimetype", "--output-format", "%m", file_path],
                stdin=PIPE,
                stdout=PIPE,
                stderr=PIPE,
            ).communicate()[0]
            str_ = raw_mime.decode("utf-8").replace("\n", "")
    if not str_:
        # Should never happen.
        raise ValueError("Cannot determine the type of " + file_path)
    return str_
def extract_corpus(docs=DOCS, corpus=CORPUS):
    """
    Extracts a text corpus from the PDF documents and writes them to disk.

    :param docs: directory of source PDF documents
    :param corpus: output directory for the extracted .txt files
    """
    # Create corpus directory if it doesn't exist.
    if not os.path.exists(corpus):
        os.mkdir(corpus)

    # For each PDF path, use pdftotext to extract the text file.
    for path in get_documents(docs):
        # Call the subprocess command (must be on your path)
        document = Popen(['pdftotext', path, '-'],
                         stdout=PIPE).communicate()[0]

        # Normalize to NFKD and keep only printable characters.
        # BUG FIX: in Python 3 ``filter`` returns an iterator, which cannot
        # be passed to ``f.write`` -- join it back into a string.
        document = ''.join(
            char
            for char in unicodedata.normalize('NFKD', document.decode('utf-8'))
            if char in string.printable
        )

        # Write the document out to the corpus directory
        fname = os.path.splitext(os.path.basename(path))[0] + ".txt"
        outpath = os.path.join(corpus, fname)
        with codecs.open(outpath, 'w') as f:
            f.write(document)
def list_outdated_packages(python):
    """
    Get a list of outdated packages

    :param str python: Path to the python executable
    :return list: Outdated Python packages if any exist; empty list otherwise
    """
    logging.debug("[{0}] Running {0} -m pip list -o".format(python))
    try:
        raw = Popen([python, "-m", "pip", "list", "-o"],
                    stdout=PIPE, stderr=PIPE).communicate()[0]
    except KeyboardInterrupt:
        logging.warning("[{}] Keyboard interrupt detected; Skipping this version...".format(python))
        return []
    except Exception as exp:
        logging.error("[{}] Exception encountered while listing outdated packages. {}".format(python, exp))
        return []
    if not raw:
        return []
    # `pip list -o` prints a two-line header followed by one
    # "<name> <version> ..." row per package; keep the lowercased names.
    rows = raw.decode('utf-8').split('\n')[2:]
    return [row.split()[0].lower()
            for row in rows
            if row.split() and row.split()[0]]
def encrypt(text): encryption_password = weechat.config_get_plugin("encryption_password") # decrypt the password if it is stored as secured data if encryption_password.startswith("${sec."): encryption_password = weechat.string_eval_expression(encryption_password, {}, {}, {}) if PY3: text = text.encode("UTF-8") command="openssl enc -aes-128-cbc -salt -base64 -md md5 -A -pass env:OpenSSLEncPW" opensslenv = os.environ.copy(); # Unknown whether the encryption password should or should not be # (UTF8-)encoded before being passed to the environment in python 3. opensslenv['OpenSSLEncPW'] = encryption_password output, errors = Popen(shlex.split(command), stdin=PIPE, stdout=PIPE, stderr=PIPE,env=opensslenv).communicate(text + b" ") output = output.replace(b"/", b"_") output = output.replace(b"+", b"-") output = output.replace(b"=", b"") if PY3: output = output.decode("UTF-8") return output
def get_passphrase():
    """Get a password

    Returns: string
    """
    conf = configparser.ConfigParser()
    conf.read(expanduser("~/.config/networkmanager-dmenu/config.ini"))
    try:
        pinentry = conf.get("dmenu", "pinentry")
    except (configparser.NoOptionError, configparser.NoSectionError):
        pinentry = None
    if not pinentry:
        # No pinentry configured: fall back to a dmenu prompt.
        return Popen(dmenu_cmd(0, "Passphrase"),
                     stdin=PIPE, stdout=PIPE).communicate()[0].decode(ENC)
    # Drive the pinentry protocol; the PIN comes back as a 'D <pin>' line.
    out = Popen(pinentry, stdout=PIPE, stdin=PIPE).communicate(
        input=b'setdesc Get network password\ngetpin\n')[0]
    pin = ""
    if out:
        res = out.decode(ENC).split("\n")[2]
        if res.startswith("D "):
            pin = res.split("D ")[1]
    return pin
def get_json(cmd):
    """Run *cmd* (an argv list) and parse its stdout as JSON.

    Any stderr output is logged as a warning; the decoded JSON value is
    returned regardless.
    """
    logger = logging.getLogger("aws_dns")
    out, err = Popen(cmd, stdout=PIPE, stderr=PIPE).communicate()
    if err:
        logger.warning("Command {0} reported error: {1}".
                       format(cmd, err.decode("utf-8")))
    return json.loads(out.decode("utf-8"))
def oc_delete_kiali_permissions_from_cluster(self):
    """Delete the `kiali` clusterrolebinding via `oc`.

    Returns True when the binding was deleted, or when it was already
    absent (the 'not found' error from oc).
    """
    stdout, stderr = Popen('oc delete clusterrolebindings kiali',
                           shell=True, stdout=PIPE, stderr=PIPE).communicate()
    deleted = 'deleted' in stdout.decode()
    already_gone = ('clusterrolebindings.authorization.openshift.io "kiali" not found'
                    in stderr.decode())
    return deleted or already_gone
def get_active_app_name():
    """Return the name of the frontmost macOS application, via AppleScript."""
    cmd = """osascript \
        -e 'tell application "System Events"' \
        -e 'set app_name to name of the first process whose frontmost is true' \
        -e 'end tell' """
    raw = Popen(cmd, shell=True, stdout=PIPE).stdout.read()
    return raw.decode().strip()
def lookfor(file, pattern, timepattern, timezone, interval):
    """ Look for a pattern in given files within interval """
    message = ''
    timestamps = within(timezone, timepattern, interval)
    since = localtime(timezone).strftime("%H:%M")
    abspath = os.path.abspath(file)
    message += ("### Looking for %s log in %s "
                "the last %d minutes since %s %s ###\n"
                % (pattern, abspath, interval, since, timezone))
    for timestamp in timestamps:
        # add `:` so it will match `HH:MM:`
        # not `HH:MM` which can be mislead to `MM:SS`
        needle = timestamp + ':' + '.*' + pattern
        stdout, stderr = Popen(['grep', needle, file],
                               stdout=PIPE).communicate()
        gotcha = stdout.decode("utf-8")
        if not gotcha:
            print("### Can't find any %s log at %s %s in %s ###"
                  % (pattern, timestamp, timezone, file))
        else:
            print("##### Found matching %s log at %s %s in %s #####"
                  % (pattern, timestamp, timezone, file))
            message = message + gotcha + "\n"
    return message
def tracked_files():
    """Return the repository's tracked file paths as a list of strings."""
    raw = Popen(git_ls_tree(), shell=True, stdout=PIPE).stdout.read()
    return raw.decode().split('\n')
def build():
    """Compile src/main.c with clang (unless skip_build() says otherwise) and
    populate the global TESTS_MAP with {test name -> address offset from
    _main}, parsed from `nm` output.
    """
    global TESTS_MAP
    if not skip_build():
        print('Building...')
        cmd = 'cd "{}" && mkdir -p build && cd build && \
            clang -O2 -Werror --save-temps ../src/main.c -o main'.format(ROOT)
        process = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
        stdout, stderr = process.communicate()
        if process.returncode != 0:
            # Surface the compiler's own output before failing.
            sys.stdout.write(stdout.decode())
            sys.stderr.write(stderr.decode())
            raise Exception('clang exits non-zero!')
        # Force a re-parse of the symbol table after a fresh build.
        TESTS_MAP = None
    if TESTS_MAP is None:
        # Only defined symbols (addresses starting with 0) are of interest.
        cmd = 'nm {}/build/main | grep "^0"'.format(ROOT)
        stdout, _ = Popen(cmd, stdout=PIPE, shell=True).communicate()
        TESTS_MAP = {}
        for line in stdout.decode().splitlines():
            addr, _, name = line.split()
            # Symbols named _test_<x> are test entry points.
            if name[:6] == '_test_':
                name = name[6:]
                TESTS_MAP[name] = int(addr, 16)
            elif name == '_main':
                main_point = int(addr, 16)
        # Store each test's address relative to _main.
        for key in TESTS_MAP:
            TESTS_MAP[key] -= main_point
def hg_branch_iter_local(repo):
    """Yield the names of open local branches of a Mercurial repository.

    Runs `hg branches -c` against *repo* and strips the trailing revision
    column from each output line.
    """
    cmd = ('hg', '-y', 'branches', '-c', '-R', repo)
    out = Popen(cmd, stdout=PIPE).communicate()[0]
    # splitlines() handles any newline convention; the original split on
    # os.linesep, which silently breaks on Windows when consuming
    # Unix-newline tool output.
    lines = out.decode('utf8').splitlines()
    # Raw string fixes the invalid '\s' escape in the regex literal.
    pairs = (re.split(r'\s+', line, 1) for line in lines if line)
    return (name for name, rev in pairs)
def tracked_files():
    """Return tracked file paths, excluding any path containing '__init__'."""
    raw = Popen(git_ls_tree(), shell=True, stdout=PIPE).stdout.read()
    paths = raw.decode().split('\n')
    return [p for p in paths if '__init__' not in p]
def run_du(options):
    """ Run 'du' and returned parsed results """
    stdout, err = Popen(["du"] + options + [args.path],
                        stdout=PIPE).communicate()
    # One (size, path) pair per tab-separated output line.
    return [line.split("\t")
            for line in stdout.decode("utf-8").splitlines()]
def locate_unilex():
    """Find the 'unilex' file via the `locate` database.

    Returns the first matching path; prints an error and exits when none
    is found.
    """
    out, _ = Popen('locate unilex', shell=True, stdout=PIPE).communicate()
    candidates = [p for p in out.decode().splitlines()
                  if basename(p) == 'unilex']
    if candidates:
        return candidates[0]
    print("Error: couldn't locate unilex", file=stderr)
    exit(-1)
def _gcc_run(self, lang, code): import os from subprocess import Popen, PIPE is_c = False if lang == 'c': compiler = 'gcc' is_c = True elif lang == 'cpp': compiler = 'g++' else: raise ValueError() newline_splited_code_list = code.split('\n') src_file = 'code.{}'.format('c' if is_c else 'cpp') exe_file = 'run' with open('./{}'.format(src_file), 'wt') as src: for line in code: src.write(line) compile_cmd = [compiler, src_file, '-o', exe_file] compile_proc = Popen(compile_cmd) compile_proc.wait() run_code_cmd = ['./{}'.format(exe_file)] output_bstr = Popen(run_code_cmd, stdout=PIPE).communicate()[0] output_str = output_bstr.decode('utf-8') output = output_str.split('\n') if os.path.exists(exe_file): os.remove(exe_file) return output
def check(parseQueue, mRFile, path):
    """Ask autoChecker.sh for new files under *path* and enqueue each one
    onto parseQueue; re-schedules itself to run again in 30 seconds.
    """
    newFiles, err = Popen(["/home/dev/GroundSeg/MCGSmain/autoCheck/autoChecker.sh", path, mRFile],
                          stdout=PIPE).communicate()
    for newFile in newFiles.decode('utf-8').split():
        parseQueue.put(path + newFile)
        print("Adding file to parse Queue: {}".format(path + newFile))
    # BUG FIX: the Timer was created but never started, so the periodic
    # re-check never actually ran.
    threading.Timer(30.0, check, args=(parseQueue, mRFile, path)).start()
    return
def run(cmd, fail=Host + ":" + PathShort + " \$ "):
    """Run *cmd*; if git reports a fatal error on stderr, print *fail* and
    exit, otherwise return the decoded stdout.
    """
    res, err = Popen(cmd, stdout=PIPE, stderr=PIPE).communicate()
    err_string = err.decode('utf-8')
    if 'fatal' in err_string:
        # BUG FIX: `print fail` was Python 2 statement syntax -- a
        # SyntaxError under the Python 3 this code otherwise targets.
        print(fail)
        sys.exit(0)
    return res.decode('utf-8')
def create_history_frequencies():
    """Build (command, count) frequency tuples from the user's shell history.

    Detects the login shell from $SHELL, parses the matching history file
    (bash/sh/ksh plain format; zsh extended ';'-separated format) and counts
    occurrences of each command after command_pattern stripping.

    :raises Exception: for an unrecognized shell.
    """
    home = path.expanduser('~')
    logger.debug("user home path = '%s'" % home)
    shell_byte = Popen("echo $SHELL", shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT).communicate()[0]
    shell_path = shell_byte.decode("utf-8").strip()
    shell_name = shell_path.rsplit("/", 1)[-1]
    logger.debug("shell path = '%s'" % shell_path)
    logger.debug("shell name = '%s'" % shell_name)
    words = {}
    if shell_name in ["bash", "sh", "ksh"]:
        if shell_name in ["ksh"]:
            filepath = home + "/.sh_history"
        elif shell_name in ["bash", "sh"]:
            filepath = home + "/.bash_history"
        else:
            raise Exception()
        with codecs.open(filepath, "r", encoding='utf-8', errors='ignore') as f:
            for line in f:
                word = command_pattern.sub("", line).strip()
                words[word] = words.get(word, 0) + 1
    elif shell_name in ["zsh"]:
        # zsh extended history lines look like ': <ts>:<dur>;<command>'.
        with codecs.open(home + "/.zsh_history", "r", encoding='utf-8', errors='ignore') as f:
            for line in f:
                parts = line.split(";", 1)
                if len(parts) < 2:
                    continue
                word = command_pattern.sub("", parts[1]).strip()
                words[word] = words.get(word, 0) + 1
    elif shell_name in ["csh"]:
        logger.warning("Not implemented!")
        # TODO:
    else:
        # BUG FIX: the original interpolated the undefined name `shell` with
        # an invalid '%1' placeholder, raising the wrong exception here.
        raise Exception("Unknown shell : '%s'" % shell_name)
    return tuple(words.items())
def run(cmd, fail=Host + ":" + PathShort + " \$ "):
    """Run *cmd*; if git reports a fatal error on stderr, print *fail* and
    exit, otherwise return the decoded stdout.
    """
    res, err = Popen(cmd, stdout=PIPE, stderr=PIPE).communicate()
    err_string = err.decode('utf-8')
    if 'fatal' in err_string:
        # BUG FIX: `print fail` was Python 2 statement syntax -- a
        # SyntaxError under the Python 3 this code otherwise targets.
        print(fail)
        sys.exit(0)
    return res.decode('utf-8')
def addfileextension(attachmentpath, attachmentname):
    """Copy an attachment into the working directory, detect its MIME type
    with `file -bi`, and rename it with a matching extension.

    Returns the new filename; on an OSError during detection/rename the
    original name is returned unchanged (the original code raised NameError
    here because `filename` was never assigned on that path).
    """
    if debug:
        print("DEBUG - addfileextension(): called")
    shutil.copyfile(attachmentpath + attachmentname, attachmentname)
    # BUG FIX: fall back to the unmodified name so the error path below
    # does not reference an unbound variable.
    filename = attachmentname
    while True:
        try:
            output2, _ = Popen(['file', '-bi', attachmentname], stdout=PIPE).communicate()
            output = output2.decode('utf-8')
            mime = output.split(';', 1)[0].lower().strip()
            if debug:
                print("DEBUG - addfileextension - detected MIME: " + output)
            # it doesn't recognize AAC which is standard for voice messages. Therefore:
            if mime == "audio/x-hx-aac-adts":
                ext = os.path.extsep + 'aac'
            else:
                ext = mimetypes.guess_extension(mime, strict=False)
            if ext is None:
                ext = os.path.extsep + 'undefined'
            if debug:
                print("DEBUG - addfileextension() - file extension: " + ext)
            filename = attachmentname + ext
            if debug:
                print("DEBUG - addfileextension() - filename: " + filename)
            os.rename(attachmentname, filename)
            break
        except OSError:  # file not found
            print("addfileextension() error: file not found!")
            break
    if debug:
        print("DEBUG - addfileextension(): finished, returning string: " + str(filename))
    return(filename)
def test_dump(self):
    '''Verify that a dict written with write_json is echoed back correctly
    by the helpers/json/dump.py script.'''
    print('Testing json DUMP')
    from sutils import write_json, read_json
    print('Case 1: Dumping file.')
    jsondump = {'HELLO':'KITTY', 'BATZ':'MARU', 'MY':'MELODY' }
    write_json(jsondump,self.file)
    self.assertTrue(os.path.exists(self.file))
    # Run the dump helper under the interpreter matching this suite.
    script_path = "%s/helpers/json/dump.py" %(self.here)
    if VERSION == 2:
        testing_command = ["python2",script_path,'--file',self.file]
    else:
        testing_command = ["python3",script_path,'--file',self.file]
    output = Popen(testing_command,stderr=PIPE,stdout=PIPE)
    t = output.communicate()[0],output.returncode
    result = {'message':t[0], 'return_code':t[1]}
    self.assertEqual(result['return_code'],0)
    output = result['message']
    if isinstance(output,bytes):
        output = output.decode(encoding='UTF-8')
    # The script prints one KEY:"VALUE" line per entry; check the last three.
    dump = ['HELLO:"KITTY"', 'BATZ:"MARU"', 'MY:"MELODY"']
    result = output.strip('\n').split('\n')[-3:]
    for res in result:
        self.assertTrue(res in dump)
def blast_search(self, q):
    """ Perform a blast search and store the result """
    # Accept either a SeqRecord (keeping phred qualities when present) or a
    # plain sequence/string.
    if type(q) == SeqRecord:
        self.query = q.seq
        if hasattr(q, "letter_annotations"):
            if 'phred_quality' in list(q.letter_annotations.keys()):
                self.query_qual = q.letter_annotations["phred_quality"]
            else:
                self.query_qual = None
    else:
        self.query = q
    self.query_length = len(q)
    # blastn_query_str is a command template interpolated from the local
    # variables of this call (query, query_length, ...).
    blastn_query = self.blastn_query_str.format(**locals())
    resp, err = Popen(blastn_query,
                      stdout=PIPE,
                      stderr=PIPE,
                      shell=True).communicate()
    if not resp:
        return None
    if err:
        raise Exception(err)
    # Format variables: one OrderedDict per tab-separated result row, values
    # auto-converted (int/float) where possible.
    resp = [ OrderedDict( list( zip(self.output_format, list(map(autoconvert, x.split("\t")))))) for x in resp.decode("utf-8").splitlines() ]
    return resp
def bam_count(bam_file, fasta_ref, output_dir, q=0, b=0, feature_name='', site_file='', force=False):
    """Run bam-readcount on a BAM file and capture raw counts into a CSV.

    The output name and command line depend on whether a feature name
    and/or a site-list file were supplied.  The run is skipped when the
    output file already exists, unless *force* is set.  Returns the output
    file path.
    """
    sample = os.path.basename(bam_file).split(".")[0]
    base_cmd = 'bam-readcount -w 0 -q {0} -b {1} -i'.format(q, b)
    if feature_name == '' and site_file == '':
        out_file = os.path.join(output_dir, '{0}_raw.csv'.format(sample))
        cmd = '{0} -f {1} {2} > {3}'.format(base_cmd, fasta_ref, bam_file, out_file)
    else:
        out_file = os.path.join(output_dir,
                                '{0}_{1}_raw.csv'.format(sample, feature_name))
        if site_file == '':
            cmd = '{0} -f {1} {2} > {3}'.format(base_cmd, fasta_ref, bam_file, out_file)
        else:
            # Restrict counting to the regions listed in site_file (-l).
            cmd = '{0} -l {1} -f {2} {3} > {4}'.format(base_cmd, site_file,
                                                       fasta_ref, bam_file, out_file)
    if not os.path.exists(out_file) or force:
        raw = Popen(cmd, shell=True, stdout=PIPE, stderr=STDOUT).stdout.read()
        print("Command line executed: {0}\n\n\n{1}".format(cmd, raw.decode("utf-8")))
    else:
        print('\nRead count file {0} already done.\n'.format(out_file))
    return out_file
def _get_changed_files() -> str:
    """Return the newline-separated names of files changed between HEAD and HEAD~.

    Raises:
        subprocess.CalledProcessError: if git exits non-zero.

    The original wrapped ``Popen`` in a try/except for CalledProcessError,
    but Popen never raises that exception (and ``raise sbcpe`` was a no-op
    re-raise) — ``run(check=True)`` actually enforces the exit status.
    """
    import subprocess
    completed = subprocess.run(["git", "diff", "HEAD", "HEAD~", "--name-only"],
                               stdout=PIPE, check=True)
    return completed.stdout.decode("utf-8")
def bluetooth_turn(request, bluetooth_device_id):
    """On POST, toggle bluetooth via systemd state, then redirect to '/'."""
    from subprocess import Popen, PIPE
    from .bluetooth_tools import bluetooth_scan
    from django.http import HttpResponseRedirect
    from django.shortcuts import get_object_or_404
    from .models import Bluetooth, BluetoothDevice
    device = get_object_or_404(BluetoothDevice, pk=bluetooth_device_id)
    print('got 404')
    print(device)
    if request.method == 'POST':
        state, _ = Popen(['systemctl', 'is-active', 'bluetooth'],
                         stdout=PIPE, stderr=PIPE).communicate()
        if state.decode('utf-8').replace('\n', '') == 'active':
            print('turning off')
            bluetooth_scan.turn_off(bluetooth_device_id)
        else:
            print('turning on')
            bluetooth_scan.turn_on()
            controller = bluetooth_scan.controller_show()
            bluetooth_device = BluetoothDevice.objects.get(id=controller)
            bluetooth_scan.main(bluetooth_device_id=controller)
    return HttpResponseRedirect('/')
def test_dump(self):
    """Write a JSON fixture, run the dump helper script, check its output lines."""
    print('Testing json DUMP')
    from sutils import write_json, read_json
    print('Case 1: Dumping file.')
    fixture = {'HELLO': 'KITTY', 'BATZ': 'MARU', 'MY': 'MELODY'}
    write_json(fixture, self.file)
    self.assertTrue(os.path.exists(self.file))
    script_path = "%s/helpers/json/dump.py" % self.here
    if VERSION == 2:
        cmd = ["python2", script_path, '--file', self.file]
    else:
        cmd = ["python3", script_path, '--file', self.file]
    proc = Popen(cmd, stderr=PIPE, stdout=PIPE)
    stdout_data = proc.communicate()[0]
    self.assertEqual(proc.returncode, 0)
    if isinstance(stdout_data, bytes):
        stdout_data = stdout_data.decode(encoding='UTF-8')
    expected_lines = ['HELLO:"KITTY"', 'BATZ:"MARU"', 'MY:"MELODY"']
    # Only the last three lines carry the dumped key/value pairs.
    tail = stdout_data.strip('\n').split('\n')[-3:]
    for line in tail:
        self.assertTrue(line in expected_lines)
def _write_classic_config(peer_conf_dir, peer_cfg, qrcode, qrcode_type):
    """Write one classic config (plus a yml copy, optionally a QR code) per peer.

    Parameters:
        peer_conf_dir: directory holding per-peer config files; must exist.
        peer_cfg: mapping {interface: {peer: cfg}}.
        qrcode: truthy to also render each conf through ``qrencode``.
        qrcode_type: qrencode ``-t`` output type; 'ansiutf8' output is
            stripped of ANSI escapes before being stored.

    Returns {'peers': {peer: {'cfg_file': ..., 'qrcode': ...}}} on success,
    or False when peer_conf_dir is not a directory.
    """
    ret = {'peers': {}}
    if not path.isdir(peer_conf_dir):
        return False
    for interface in peer_cfg:
        # BUG FIX: the original inner loop rebound the ``peer_cfg`` parameter
        # to a single peer's cfg, corrupting iteration over later interfaces.
        for peer, cfg in peer_cfg[interface].items():
            yml_file, cfg_file = _gen_filenames(peer_conf_dir, interface, peer)
            # write conf file
            _write_conf(cfg_file, cfg)
            ret['peers'].update({peer: {'cfg_file': cfg_file}})
            # write conf also as yml (context managers close the handles,
            # which the original left to the garbage collector)
            with open(yml_file, 'w') as yml_fh:
                yml_fh.write(yaml.dump(cfg, default_flow_style=False))
            if qrcode:
                with open('{}/{}_{}.conf'.format(peer_conf_dir, interface,
                                                 peer), 'r') as conf_fh:
                    conf_text = conf_fh.read()
                # BUG FIX: kept in a separate name instead of rebinding the
                # ``qrcode`` flag, which could falsify it for later peers.
                qr_out = Popen(['qrencode', '-t', qrcode_type], shell=False,
                               stdin=PIPE, stdout=PIPE).communicate(
                                   input=conf_text.encode('utf8'))[0]
                if qrcode_type == 'ansiutf8':
                    # Strip ANSI escape sequences, see
                    # https://stackoverflow.com/a/14693789
                    ansi_escape = re.compile(
                        r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
                    qr_out = ansi_escape.sub('', qr_out.decode('utf8'))
                ret['peers'][peer].update({'qrcode': str(qr_out)})
    return ret
def get_passphrase():
    """Get a password

    Uses the configured pinentry program when one is set in
    ~/.config/networkmanager-dmenu/config.ini, otherwise prompts via dmenu.

    Returns: string
    """
    config = configparser.ConfigParser()
    config.read(expanduser("~/.config/networkmanager-dmenu/config.ini"))
    pinentry = (config.get("dmenu", "pinentry")
                if config.has_option("dmenu", "pinentry") else None)
    if not pinentry:
        prompt = Popen(dmenu_cmd(0, "Passphrase"), stdin=PIPE, stdout=PIPE)
        return prompt.communicate()[0].decode(ENC)
    pin = ""
    reply = Popen(pinentry, stdout=PIPE, stdin=PIPE).communicate(
        input=b'setdesc Get network password\ngetpin\n')[0]
    if reply:
        # Third line of the Assuan reply carries "D <pin>" on success.
        line = reply.decode(ENC).split("\n")[2]
        if line.startswith("D "):
            pin = line.split("D ")[1]
    return pin
def getClasterAdmins(self, cluster, host, port):
    """Return the cluster administrator records reported by ``rac``.

    Runs ``rac cluster admin list`` against host:port and parses its
    "key : value" lines into dicts; the 'descr' key terminates each record.
    On stderr output, warns via self.Mess and returns an empty list.

    Removed the dead ``i`` counter the original incremented but never read.
    """
    admins = []
    auth_cluster = self.getSessionAuth(cluster, 'cluster')
    cmd = (CMD_PREFIX + ' ' + RAC + ' cluster admin list --cluster=' + cluster
           + ' ' + auth_cluster + host + ':' + port)
    if DEBUG:
        print(cmd)
    ret, err = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
    ret = ret.decode('utf-8')
    err = err.decode('utf-8')
    if len(err) > 0:
        if DEBUG:
            print('Есть ошибки:', err)
        self.Mess('warn', err + ' на ' + host + ':' + port)
    else:
        if DEBUG:
            print(ret)
        admin = {}
        for line in ret.split('\n'):
            if line.find(' : ') > 0:
                key = (line.split(' : '))[0].strip()
                value = (line.split(' : '))[1].strip()
                admin[key] = value
                if key == 'descr':
                    # 'descr' is the last field of a record — flush it.
                    admins.append(admin)
                    admin = {}
    return admins
def test_module_linecount(self):
    """ This project is supposed to remain under 200 lines """
    test_dir = os.path.dirname(os.path.realpath(__file__))
    module_path = os.path.abspath(
        os.path.join(test_dir, os.pardir, "acme_tiny.py"))
    # `wc -l` prints "<count> <path>"; take the leading count.
    wc_out, _ = Popen(["wc", "-l", module_path],
                      stdout=PIPE, stderr=PIPE).communicate()
    line_count = int(wc_out.decode("utf8").split(" ", 1)[0])
    self.assertTrue(line_count <= 200)
def test_contact(self): """ Make sure optional contact details can be set """ # add a logging handler that captures the info log output log_output = StringIO() debug_handler = logging.StreamHandler(log_output) acme_tiny.LOGGER.addHandler(debug_handler) # call acme_tiny with new contact details old_stdout = sys.stdout sys.stdout = StringIO() result = acme_tiny.main([ "--account-key", self.KEYS['account_key'].name, "--csr", self.KEYS['domain_csr'].name, "--acme-dir", self.tempdir, "--directory-url", self.DIR_URL, "--check-port", self.check_port, "--contact", "mailto:[email protected]", "mailto:[email protected]", ]) sys.stdout.seek(0) crt = sys.stdout.read().encode("utf8") sys.stdout = old_stdout log_output.seek(0) log_string = log_output.read().encode("utf8") # make sure the certificate was issued and the contact details were updated out, err = Popen(["openssl", "x509", "-text", "-noout"], stdin=PIPE, stdout=PIPE, stderr=PIPE).communicate(crt) self.assertIn(self.ca_issued_string, out.decode("utf8")) self.assertTrue(( # can be in either order "Updated contact details:\nmailto:[email protected]\nmailto:[email protected]" in log_string.decode("utf8") or "Updated contact details:\nmailto:[email protected]\nmailto:[email protected]" in log_string.decode("utf8") )) # remove logging capture acme_tiny.LOGGER.removeHandler(debug_handler)
def _get_vc_env():
    """
    Run the batch file specified in the vc_vars_path setting and return back
    a dictionary of the environment that the batch file sets up.

    Returns None if the batch file fails.
    """
    settings = sublime.load_settings('Preferences.sublime-settings')
    # Raw string: the original default relied on '\P' not being a recognized
    # escape sequence, which is deprecated and error-prone; the value is
    # byte-identical.
    vars_cmd = settings.get(
        'odin_vc_vars_path',
        r'C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvarsall.bat'
    )
    # vars_arch = settings.get("vc_vars_arch", "amd64")
    try:
        # Run the batch, outputting a sentinel value so we can separate out
        # any error messages the batch might generate.
        shell_cmd = "\"{0}\" {1} && echo {2} && set".format(
            vars_cmd, 'x64', SENTINEL)
        output = Popen(shell_cmd, stdout=PIPE, shell=True).stdout.read()
        lines = [line.strip() for line in output.decode('utf-8').splitlines()]
        env_lines = lines[lines.index(SENTINEL) + 1:]
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; any batch/decoding failure yields None.
        return None
    # Convert from var=value to dictionary key/value pairs. We upper case the
    # keys, since Python does that to the mapping it stores in environ.
    env = {}
    for env_var in env_lines:
        parts = env_var.split('=', maxsplit=1)
        env[parts[0].upper()] = parts[1]
    return env
def get_running_containers(args):
    """Finds running Docker containers by parsing the output of `docker ps`."""
    command = ['docker', 'ps'] + args
    stdout_data, _ = Popen(command, stdout=PIPE, stderr=PIPE).communicate()
    # First line is the column header; the rest are one container per row.
    header, *rows = stdout_data.decode('utf-8').splitlines()
    ranges = [find_column_range(header, column) for column in COLUMNS]
    return [row_to_container(row, ranges) for row in rows]
def initClnt(self):
    """(Re)start the uftpd client daemon serving this beacon folder.

    Kills any running uftpd instance first, since only one can run at a time.
    """
    import os
    # os.getcwd() replaces the old ``Popen("pwd")`` round-trip through a
    # child process (whose second communicate() value was always None).
    clnt_cmd = "uftpd -D " + os.getcwd() + '/' + self.beaconFolder
    Popen(shlex.split("sudo killall -9 uftpd"))
    Popen(shlex.split(clnt_cmd))
    # The above system is awful.
    # Can only have one instance of uftpd running
    # Future Dawson will hopefully fix this
    return
def get_spatch_version(self):
    """Return the version string reported by ``spatch -version``."""
    cmd = [self.spatch, '-version']
    try:
        out = Popen(cmd, stdout=PIPE, stderr=STDOUT).communicate()[0]
    except OSError as err:
        _raise_run_err(err, cmd)
    # Output looks like "... version X.Y.Z with ..."; capture the middle.
    match = re.search(r"version (.*?) with", out.decode('utf8'))
    return match.group(1)
def gettermsize(): ## Call `stty` to determine the size of the terminal, this way is better than using python's ncurses for channel in (sys.stderr, sys.stdout, sys.stdin): termsize = Popen(['stty', 'size'], stdout=PIPE, stdin=channel, stderr=PIPE).communicate()[0] if len(termsize) > 0: termsize = termsize.decode('utf8', 'replace')[:-1].split(' ') # [:-1] removes a \n termsize = [int(item) for item in termsize] return termsize return (24, 80) # fall back to minimal sane size
def getSysPath():
    """Return the login shell's $PATH, trimmed of trailing whitespace and ':'."""
    command = "/usr/bin/login -fpql $USER $SHELL -l -c 'echo -n $PATH'"
    # Execute command with original environ. Otherwise, our changes to the PATH propogate down to
    # the shell we spawn, which re-adds the system path & returns it, leading to duplicate values.
    raw = Popen(command, stdout=PIPE, shell=True, env=originalEnv).stdout.read()
    # Decode the byte array into a string, remove trailing whitespace, remove trailing ':'
    decoded = raw.decode("utf-8")
    return decoded.rstrip().rstrip(':')
def j(path):
    """Change directory to whatever autojump resolves for *path*."""
    proc = Popen(['autojump'] + path.split(), stdout=PIPE, shell=False)
    target = proc.communicate()[0].strip()
    if target:
        ip.magic('cd %s' % target.decode('utf-8'))
def shs(cmd):
    """Run *cmd* (split on whitespace) and return its stdout as stripped text.

    Raises:
        RuntimeError: when the command cannot be started, or when it writes
            anything to stderr.
    """
    from subprocess import Popen, PIPE
    try:
        # BUG FIX: stderr was never piped, so communicate() always returned
        # None for it and the stderr check below was dead code.
        stdout, stderr = Popen(cmd.split(), stdout=PIPE,
                               stderr=PIPE).communicate()
    except OSError as e:
        raise RuntimeError("Cannot run command `{0}`".format(cmd), e)
    if stderr:
        raise RuntimeError(stderr)
    return stdout.decode('utf-8').strip()
def hg_branch_iter_remote(repo, python):
    """List remote hg branch names by running the probe script under *python*."""
    with NamedTemporaryFile() as script:
        # Materialize the probe source (parameterized by repo) on disk so
        # the chosen interpreter can execute it.
        script.write((hg_list_remote_py % repo).encode('utf8'))
        script.flush()
        raw = Popen((python, script.name), stdout=PIPE).communicate()[0]
    branches = literal_eval(raw.decode('utf8'))
    return [entry[0] for entry in branches]
def runPerl(self):
    """Run the Lingua::BO::Wylie pronounce script over self.filename.

    Returns the uppercased output split into lines.
    """
    command = [
        "perl",
        "third-party/Lingua-BO-Wylie-dev/bin/pronounce.pl",
        "-j", "' '",
        self.filename,
        "-",
    ]
    raw = Popen(command, stdout=PIPE).communicate()[0]
    text = raw.decode("utf-8")
    return text.upper().split("\n")
def _update_data(self):
    """Render the currently tracked watson project, or empty text when idle."""
    status = Popen(["watson", "status"], stdout=PIPE).communicate()[0].decode()
    if status.startswith("No project started"):
        rendered = ""
    else:
        # Second whitespace-separated token of the status line is the project.
        rendered = self._color_text(status.split()[1].strip(),
                                    fg=self.cfg.watson.color_fg,
                                    bg=self.cfg.watson.color_bg)
    return (self.__module__, self._out_format(rendered))
def test_cmd_help(self):
    """Each rosservice subcommand must be listed in usage and answer -h."""
    cmd = 'rosservice'
    subcommands = ['args', 'info', 'list', 'call', 'type', 'uri', 'find']
    # Both the bare command and -h should print the command overview.
    for argv in ([cmd], [cmd, '-h']):
        text = Popen(argv, stdout=PIPE).communicate()[0].decode()
        self.assert_('Commands' in text)
    # make sure all the commands are in the usage
    for sub in subcommands:
        self.assert_("%s %s" % (cmd, sub) in text, text)
    for sub in subcommands:
        result = Popen([cmd, sub, '-h'], stdout=PIPE).communicate()
        self.assert_("Usage:" in result[0].decode(), result)
        # make sure usage refers to the command
        self.assert_("%s %s" % (cmd, sub) in result[0].decode(), result)