Example #1
    def _init_info(self, nddshome=None):
        if not nddshome:
            nddshome = get_nddshome()
        if not nddshome or not nddshome.exists():
            logger.debug("NDDSHOME not found: '{}'", nddshome)
            return
        rti_versions = pathlib.Path(f"{nddshome}/rti_versions.xml")
        if not rti_versions.exists():
            raise ValueError(f"required path not found: {rti_versions}")
        # read installed architectures
        result = cut(
            cut(
                grep(
                    grep("-A", "1",
                         "RTI Connext DDS Pro Target Package Installed",
                         str(rti_versions)), "<architecture>"), "-d>", "-f2-"),
            "-d<", "-f1")
        targets = frozenset(result.stdout.decode("utf-8").split("\n")[:-1])

        rs_path = pathlib.Path(f"{nddshome}/bin/rtiroutingservice")
        if not rs_path.exists():
            raise ValueError(f"required path not found: {rs_path}")

        svc = types.SimpleNamespace(routing_service=rs_path)

        py = types.SimpleNamespace(
            build=functools.partial(build_connextddspy, nddshome))

        self._info = types.SimpleNamespace(path=nddshome,
                                           targets=targets,
                                           service=svc,
                                           copy_to=functools.partial(
                                               copy_nddshome, nddshome),
                                           py=py)
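Example #2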
def delete_Depart_User(departName):
    try:
        ldap_res = sh.cut(
            sh.cut(
                sh.cut(
                    sh.grep(
                        sh.ldapsearch('-x', '-D', 'cn=root,dc=limikeji,dc=com',
                                      '-w', 'limikeji', 'cn=%s' % departName),
                        'uid'), '-d', ' ', '-f2'), '-d', ',', '-f1'), '-d',
            '=', '-f2').stdout.decode('utf-8')
        ldap_resList = str(ldap_res).split('\n')[:-1]  # all user names in the department
        if 'test' in ldap_resList:
            ldap_resList.remove('test')
            print('Deleting group %s' % departName)
            # Then delete the users in the department; if only the users were deleted, they would
            # still appear under the department, but with an unusable dn shown as a red cross
            for user in ldap_resList:
                try:
                    ldap_res = sh.grep(
                        sh.ldapsearch('-x', '-D', 'cn=root,dc=limikeji,dc=com',
                                      '-w', 'limikeji', 'uid=%s' % user),
                        'mail').stdout.decode('utf-8')
                    print(ldap_res)
                except sh.ErrorReturnCode_1:
                    with open('/tmp/temp_user_delete_attr.ldif',
                              'w',
                              encoding='utf-8') as file_handler:
                        file_handler.write(
                            ATTR_DELETE_MODIFY.format_map({
                                'depart_name': departName,
                                'username': user
                            }))
                    sh.ldapadd('-x', '-D', 'cn=root,dc=limikeji,dc=com', '-w',
                               'limikeji', '-f',
                               '/tmp/temp_user_delete_attr.ldif')
        else:
            print('Deleting group %s' % departName)
            for user in ldap_resList:

                try:
                    ldap_res = sh.grep(
                        sh.ldapsearch('-x', '-D', 'cn=root,dc=limikeji,dc=com',
                                      '-w', 'limikeji', 'uid=%s' % user),
                        'mail').stdout.decode('utf-8')
                    print(ldap_res)
                except sh.ErrorReturnCode_1:
                    with open('/tmp/temp_user_delete_attr.ldif',
                              'w',
                              encoding='utf-8') as file_handler:
                        file_handler.write(
                            ATTR_DELETE_MODIFY.format_map({
                                'depart_name': departName,
                                'username': user
                            }))
                    sh.ldapadd('-x', '-D', 'cn=root,dc=limikeji,dc=com', '-w',
                               'limikeji', '-f',
                               '/tmp/temp_user_delete_attr.ldif')

    except sh.ErrorReturnCode_1:
        pass
Example #3
def parse(filename, **kwargs):
    # cat outfile | grep ip | cut -d '|' -f 2 | cut -d ' ' -f 3 | cut -d '.' -f 4 | sort -n | wc -l
    return sh.sort(cut(cut(cut(grep(cat(filename), "ip"), d="|", f=2),
                           d=" ",
                           f=3),
                       d=".",
                       f=4),
                   "-n",
                   _out=kwargs.get("_out"))
Example #4
def ipv4_nslookup(ip):
    ip = ipaddress.ip_address(ip)
    from sh import awk, cut, nslookup, rev
    # nslookup ${ip} | cut -d= -f2- | awk '{print $1;}' | rev | cut -d. -f2- | rev
    hostname = str(
        rev(
            cut(rev(awk(cut(nslookup(str(ip)), "-d=", "-f2-"), "{print $1;}")),
                "-d.", "-f2-"))).strip()
    if hostname:
        return hostname
    raise StopIteration()
Example #5
def get_container_id():
    "get container id"
    id_ = sh.cut(sh.head(sh.cat("/proc/self/cgroup"), "-n", "1"), "-d", "/",
                 "-f4").strip()
    if not id_:
        return "unknown"
    return id_
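Example #6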
def get_files(usaf, wban):
    output = sh.grep("%s %s" % (usaf, wban),
                     "isd-history.txt").strip().split(" ")
    end = int(output.pop()[0:4])
    start = int(output.pop()[0:4])
    sh.mkdir("-p", "%s-%s" % (usaf, wban))
    os.chdir("%s-%s" % (usaf, wban))
    for year in range(start, end + 1):
        fn = "%s-%s-%s.gz" % (usaf, wban, year)
        if not os.path.exists(fn):
            sh.wget("ftp://ftp.ncdc.noaa.gov/pub/data/noaa/%s/%s" % (year, fn))
            print(fn)
    output_fn = "%s-%s-data.csv" % (usaf, wban)
    h = open(output_fn, "w")
    sh.sort(sh.cut(
        sh.grep(
            sh.cut(sh.zcat(glob.glob("*.gz")), "--output-delimiter=,",
                   "-c16-27,88-92"), "-v", r"\+9999"), "--output-delimiter=.",
        "-c1-17,18"),
            _out=h)
    sh.gzip(output_fn)
    sh.mv("%s.gz" % (output_fn), "..")
Example #7
def split_vcf(in_file, config, out_dir=None):
    """
    split a VCF file into separate files by chromosome
    requires tabix to be installed

    """
    if out_dir is None:
        out_dir = os.path.join(os.path.dirname(in_file), "split")

    fasta_file = config["ref"]["fasta"]
    fasta_index = fasta_file + ".fai"
    samtools_path = config["program"].get("samtools", "samtools")
    tabix_path = config["program"].get("tabix", "tabix")

    if not file_exists(fasta_index):
        samtools = sh.Command(samtools_path)
        samtools.faidx(fasta_file)

    # if in_file is not compressed, compress it
    (_, ext) = os.path.splitext(in_file)
    if ext != ".gz":
        gzip_file = in_file + ".gz"
        if not file_exists(gzip_file):
            sh.bgzip("-c", in_file, _out=gzip_file)
        in_file = gzip_file

    # create tabix index
    tabix_index(in_file)

    # find the chromosome names from the fasta index file
    chroms = str(sh.cut("-f1", fasta_index)).split()

    # make outfile from chromosome name
    def chr_out(chrom):
        out_file = replace_suffix(append_stem(in_file, chrom), ".vcf")
        return os.path.join(out_dir, os.path.basename(out_file))

    # run tabix to break up the vcf file
    def run_tabix(chrom):
        tabix = sh.Command(tabix_path)
        out_file = chr_out(chrom)
        if file_exists(out_file):
            return out_file
        with file_transaction(out_file) as tmp_out_file:
            tabix("-h", in_file, chrom, _out=tmp_out_file)
        return out_file

    out_files = map(run_tabix, chroms)
    return out_files
Example #8
def compareIp():
    logging.debug("Comparing ip...")
    #ip a s eth0 | egrep -o 'inet [0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' | cut -d' ' -f2
    curIp = str(cut(egrep(ip('a', 's', 'eth0'), '-o',
                          r'inet [0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}'),
                    '-d', ' ', '-f2')).strip()
    masterNetworkCfg = cat('/etc/systemd/network/000-master.network').split('\n')
    for i in range(len(masterNetworkCfg)):
        if masterNetworkCfg[i] == '[Address]':
            masterIp = masterNetworkCfg[i+1].split('=')[1].strip()
            logging.debug("Master ip found, breaking the loop...")
            break
    if curIp == masterIp:
        logging.debug("Master IP == cur IP...")
        return True, masterIp
    logging.debug("Master IP != cur IP...")
    return False, masterIp
Example #9
    def _break_vcf(self, in_file):
        if not file_exists(self.fasta_index):
            sh.samtools.faidx(self.fasta_file)

        # if file is not compressed, compress it
        (_, ext) = os.path.splitext(in_file)
        if ext != ".gz":
            gzip_file = in_file + ".gz"
            sh.bgzip("-c", in_file, _out=gzip_file)
            in_file = gzip_file

        # create tabix index if it does not exist already
        if not file_exists(in_file + ".tbi"):
            sh.tabix("-p", "vcf", in_file)

        # find the chromosome names from the fasta index file
        chroms = str(sh.cut("-f1", self.fasta_index)).split()
        break_dir = os.path.join(os.path.dirname(in_file), "break")
        safe_makedir(break_dir)

        def chr_out(chrom):
            out_file = os.path.join(break_dir, append_stem(in_file, chrom))
            out_file = replace_suffix(out_file, "vcf")
            return out_file

        def tabix(chrom):
            out_file = chr_out(chrom)
            if file_exists(out_file):
                return out_file
            with file_transaction(out_file) as tmp_out_file:
                sh.tabix("-h", in_file, chrom, _out=tmp_out_file)
            return out_file

        # use tabix to separate out the variants based on chromosome
        out_files = map(tabix, chroms)

        return out_files
Example #11
        events.append(int(line))
# /eos/atlas/atlastier0/rucio/data15_cos/express_express/00266661/data15_cos.00266661.express_express.merge.RAW
files = [x[:-1] for x in list(eos.ls(args.eos, _iter=True))]

if args.bylb:
    for i, file in enumerate(["root://eosatlas/%s/%s" % (args.eos, x) for x in files]):
        lb = re_lb.search(file).group(1)
        run = re_run.search(file).group(1)
        output = "run%s_lb%s.RAW" % (run, lb)
        if os.path.exists(output):
            print("%d/%d Skip %s" % (i, len(files), file))
            continue
        else:
            print("%d/%d Process %s" % (i, len(files), file))

        file_events = set(int(x[:-1]) for x in list(cut(cut(tail(atl_list("-l", file), '-n+12'),
                                                            '-d', " ", '-f3'), '-d', '=', '-f2', _iter=True)) if len(str(x[:-1])) > 1)
        extract_events = file_events.intersection(events)
        if not extract_events:
            continue

        params = []
        for e in extract_events:
            params += ["-e", e]
        params += ["-o", output]
        params += [file]
        atl_copy(*params)
        print("Created %s file with events %s" % (output, str(extract_events)))
else:
    run = None
    params = []
    for e in events:
Example #12
DATA_BASES = DATA_BASES[1:]  # first entry is 'Database' which is not a Database
DATA_BASES += ['All-Databases']
DATA_BASES = ['trading_oanda_d1']
DATESTAMP = sh.date("+%Y-%m-%d_%H:%M").strip()

for DB in DATA_BASES:
    for DD in [DATA_DIR, LOG_DIR]:
        # step a): delete all except the latest two files for each database
        print(f'database: {DB}; dir: {DD}')
        a = sh.find(DD, '-maxdepth', '1', '-type', 'f', '-regextype',
                    'sed', '-regex', rf'^/.*{DB}\-[0-9].*', '-printf',
                    '%Ts\t%p\n')
        b = sh.sort(a, '-n')
        c = sh.head(b, '-n', '-2')
        d = sh.cut(c, '-f', '2-')
        print(d.strip())
        e = sh.xargs(d, 'rm', '-rf')

    # step b): export the databases
    FILENAME = Path.joinpath(DATA_DIR, f'{DB}-{DATESTAMP}.sql.gz')
    print(f'FILENAME: {FILENAME}')
    LOGFILENAME = Path.joinpath(LOG_DIR, f'{DB}-{DATESTAMP}.log')
    print(f'LOGFILENAME: {LOGFILENAME}')

    # cmd = "mysqldump  -v --single-transaction --quick --lock-tables=false ${DB} 2>'${LOGFILENAME}' |  pigz > '${FILENAME}' "
    # sh.mysqldump('-v', '--single-transaction', '--quick', '--lock-tables=false', DB, _out=FILENAME, _err=LOGFILENAME)
    sh.ls(DATA_DIR, _out=FILENAME)
    print()
# b = sh.head(sh.sort(sh.find(DATA_DIR, '-type', 'd',  '-regextype', 'sed', '-regex', '^/.*testDB\-[0-9].*', '-printf', '%Ts\t%p\n'), '-n'), '-n', '-2')
# print(b)
Example #13
if len(gitIgnorePath) < 1:
    print >> sys.stderr, "Couldn't find git repo."
    sys.exit(1)
elif len(gitIgnorePath) > 1:
    print >> sys.stderr, "Found more than one .gitignore, using least nested for filtering."
    gitIgnorePath = [
        min(gitIgnorePath, key=lambda x: op.normpath(x).count(os.sep))
    ]

print ".gitignore path: " + op.join(gitIgnorePath[0], ".gitignore")

# Figure out which files git will ignore
os.chdir(gitIgnorePath[0])
ignored_files = sh.cut(
    sh.git("ls-files", "--others", "-i", "--exclude-standard"), "-d ",
    "-f3-").stdout.split("\n")
relative_git_path = op.relpath(gitIgnorePath[0], args.dir)
ignored_set = set()
for f in ignored_files:
    if f:  # Usually one empty newline to filter
        ignored_set.add(op.join(relative_git_path, f).lstrip("./"))
print "Ignored files: " + str(ignored_set)
print "Ignored executables: " + str(executable)

print "Too large before: " + str(tooLarge)
tooLarge -= ignored_set
tooLarge -= executable
print "Too large after: " + str(tooLarge)

doExit = False
Example #14
    def do_mcl(self, inflation=1.5, resource=4):
        sh.cut("-f", "1,2,11", self.blast, _out=self.abc)
        sh.mcxload("-abc", self.abc, "--stream-mirror", "--stream-neg-log10",
                   "-stream-tf", "ceil(200)", "-o", self.mci, "-write-tab", self.tab)
        sh.mcl(self.mci, "-I", inflation, "-resource", resource,
               "-use-tab", self.tab, "-o", self.raw_clusters)
Example #15
def get_password(username="******", shadow_file="/etc/shadow"):
    return cut(grep(username, shadow_file), "-f", 2, "-d", ":").strip()
Example #16
    dicti = {s.id : len(s) for s in SeqIO.parse(file,"fasta")}

qcov = 0.40
identity = 30


blast_covs = (raw_blast['subjectend']-raw_blast['subjectstart']+1)/ [float(dicti[n]) for n in raw_blast.index.get_level_values(0)]
# -raw_blast['mismatches']- raw_blast['gapopenings']
filtered_blast = raw_blast[blast_covs > qcov]
filtered_blast = filtered_blast[filtered_blast['identity'] > identity]
print "MCL ing"

filtered_blast.to_csv(genes +".blast",sep="\t", header=False)
resource = 4
inflation = 1.1
sh.cut("-f", "1,2,11", genes +".blast", _out = genes +".abc")
sh.mcxload("-abc", genes +".abc", "--stream-mirror", "--stream-neg-log10", "-stream-tf", "ceil(200)", "-o",  genes +".mci", "-write-tab"  , genes +".tab")
sh.mcl(genes +".mci",  "-use-tab",  genes +".tab", "-o",  genes +".raw.clust")

with open(genes +".raw.clust") as c_file:
    clusters=[l[:-1].split("\t") for l in c_file.readlines()]
    clusters = [[g.split("(")[0] for g in v ] for v in clusters]
clusters_p = []

with open(all_syntrophos,"r") as fas:
    id2name_map = {s.description.split(" ")[0] : " ".join(s.description.split(" ")[1:]) for s in SeqIO.parse(fas, "fasta")}
                    
for i,c in enumerate(clusters):
    clusters_p.append(GeneCluster(id2name_map , c))
            
with open(genes + ".proc.clust", 'w') as outfile:
Example #17
def parse(filename, **kwargs):
    # cat outfile | grep ip | cut -d '|' -f 2 | cut -d ' ' -f 3 | cut -d '.' -f 4 | sort -n | wc -l
    return sh.sort(cut(cut(cut(grep(cat(filename), "ip"), d="|", f=2), d=" ", f=3), d=".", f=4), "-n", _out=kwargs.get("_out"))
Example #18
#**********************************************************************
#*                                                                    *
#*    The code below showcases the cut function via the sh library    *
#*                                                                    *
#**********************************************************************

import sh

var = sh.cut('-d', '-', '-f5', 'index001.txt')

print(var)
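The flags above can also be passed as keyword arguments, which sh expands into short options (Example #3 uses the same style with d="|" and f=2). A minimal sketch of an equivalent call, assuming the same hyphen-delimited index001.txt input:

import sh

# cat's output is piped into cut (the piped command goes first);
# single-letter keyword arguments become flags: d='-' -> -d '-', f=5 -> -f 5
var = sh.cut(sh.cat('index001.txt'), d='-', f=5)

print(var)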
Example #19
def shortURL(url):
    # curl -i https://git.io -F "url=<url>" | grep "Location: " | cut -d' ' -f2
    return sh.cut(sh.grep(sh.curl("-i", "https://git.io", "-F", "url=%s" % url),
                          "Location: "),
                  "-d", " ", "-f2")
Example #20
        fileinfo = sh.file(path)
        if "ELF" in fileinfo and "executable" in fileinfo:
            executable.add(relpath)

if len(gitIgnorePath) < 1:
    print >> sys.stderr, "Couldn't find git repo."
    sys.exit(1)
elif len(gitIgnorePath) > 1:
    print >> sys.stderr, "Found more than one .gitignore, using least nested for filtering."
    gitIgnorePath = [min(gitIgnorePath, key=lambda x: op.normpath(x).count(os.sep))]

print ".gitignore path: " + op.join(gitIgnorePath[0], ".gitignore")

# Figure out which files git will ignore
os.chdir(gitIgnorePath[0])
ignored_files = sh.cut(sh.git("ls-files", "--others", "-i", "--exclude-standard"), "-d ", "-f3-").stdout.split("\n")
relative_git_path = op.relpath(gitIgnorePath[0], args.dir)
ignored_set = set()
for f in ignored_files:
    if f: # Usually one empty newline to filter
        ignored_set.add(op.join(relative_git_path, f).lstrip("./"))
print "Ignored files: " + str(ignored_set)
print "Ignored executables: " + str(executable)

print "Too large before: " + str(tooLarge)
tooLarge -= ignored_set
tooLarge -= executable
print "Too large after: " + str(tooLarge)

doExit = False
while tooLarge:
Example #21
def cmd_vbox_launch():
    o = output(shdmenu(cut(vboxmanage('list', 'vms'), '-d"', '-f2')))
    vboxmanage('-q', 'startvm', o, '--type', 'gui')
Example #22
# loop over the retrieved files
for filename in os.listdir(temp_dir):
    if filename.endswith(".mp3"):
        cnt_f = cnt_f + 1
        # re-encode at a lower bitrate to save space
        subprocess.call([
            'lame', '--mp3input', '-b', '32', temp_dir + "/" + filename,
            temp_dir + "/B_" + filename
        ],
                        shell=False)
        os.rename(temp_dir + "/B_" + filename, temp_dir + "/" + filename)
        # build the items to add to feed.xml
        titre = sh.cut(
            sh.grep(
                sh.ffprobe("-v", "error", "-show_format",
                           temp_dir + "/" + filename), "title"), "-d", "=",
            "-f2")
        titre = str(titre)
        titre = titre.replace('\n', '')
        fichier = open(temp_dir + "/" + "items.xml", "a")
        fichier.write("<item>\n")
        fichier.write("<title><![CDATA[" + titre + "]]></title>\n")
        fichier.write(
            "<link>https://raw.githubusercontent.com/tcaenen/tt_pod/master/media/"
            + filename + "</link>\n")
        fichier.write(
            '<enclosure url="https://raw.githubusercontent.com/tcaenen/tt_pod/master/media/'
            + filename +
            '" length="30066338" type="audio/mpeg"></enclosure>\n')
        fichier.write(
Example #23
import sh
print(sh.wpa_cli("-i", "wlan0", "list_networks"))

search1 = "SmashCam01"
networkNum = sh.cut(sh.grep(sh.wpa_cli("-i", "wlan0", "list_networks"), search1), "-f", "1").strip()
sh.wpa_cli("-i", "wlan0", "select_network", networkNum)
print(sh.wpa_cli("-i", "wlan0", "list_networks"))
Example #24
import threading
from http.server import HTTPServer
import socket
import logging as log
import atexit
# get_ip_address
from sh import ip, awk, cut

import prometheus_client
import consul


# Do not use addr and remove the address option from the register function
# if you are running this on a regular instance with a local Consul agent
addr = cut(awk(ip("addr", "show", "dev", "eth0"), "/inet / { print $2 }"), "-d/", "-f1").strip()
print(addr)

# consul should be a hostname that is accessible from the clever app container
c = consul.Consul(host="consul")

def deregister_id(service_id):
    def dereg():
        c.agent.service.deregister(service_id)
    return dereg

def start_prometheus_server(name, port_range):
    for port in range(*port_range):
        try:
            httpd = HTTPServer(("0.0.0.0", port), prometheus_client.MetricsHandler)
        except (OSError, socket.error):
            # Python 2 raises socket.error, in Python 3 socket.error is an