示例#1
0
    def install_some(self):
        """Install the tools/packages named in ``self.args``.

        Source-built tools are dispatched to ``build_tools``; anything else
        is looked up in the ``pkg`` mapping and git-cloned.  Unknown names
        are reported.  Finally all requested names are added to the
        completion dictionary via ``add_words``.
        """
        # One destination directory per requested tool, computed up front.
        trees = [self.installDir + "tools/" + name for name in self.args]

        # Single pass instead of the original's two index-based loops.
        for target, tree in zip(self.args, trees):
            if target in ["crips", "arachni", "brutex", "revsh", "nmap"]:
                # These need a dedicated build step rather than a plain clone.
                self.build_tools(target, tree)
            elif target in pkg:
                print(color.LOGGING + "[+] - Installing " + color.NOTICE +
                      "{}".format(target) + color.LOGGING + "...")
                shell("git clone -q {0} {1}".format(pkg[target], tree))
            else:
                print(
                    color.RED +
                    "[*] - Unkown package. Please retry or use the custom package installer"
                    + color.END)

        self.add_words(self.args)
    def perform_centrifuge_cycle(self, name, cycle):
        """Run a centrifuge *cycle* of newline-separated steps shaped like
        "<S>rpm for <T> seconds", refusing to start while the door is open.

        Returns "door not closed" when the door interlock blocks the cycle.
        """
        # BUG FIX: is_door_closed() returns a bit (1/0), never the string
        # "no", so the original guard could never trigger.  Refuse to start
        # unless the door reports closed (assumes 1 == closed -- confirm).
        if not self.is_door_closed():
            return "door not closed"
        self._cycle_running = True
        for step in cycle.split("\n"):
            # "<S>rpm for <T> seconds": strip the "rpm" / " seconds" suffixes.
            s = int(step.split(" for ")[0][:-3])
            t = int(step.split(" for ")[1][:-8])
            if s > self._speed_cap:
                # Silently skip steps above the hardware speed cap.
                continue
            self.speed(s)
            # Spin-wait until the measured speed reaches the target
            # (equivalent to the original "not got_speed > target_speed").
            self.target_speed = s
            while self.got_speed <= self.target_speed:
                self.getSpeed()
            # Hold the speed for the requested number of seconds.
            start_wait = datetime.now()
            while (datetime.now() - start_wait).total_seconds() < t:
                pass

        self._cycle_running = False
        # BUG FIX: os.shell does not exist -- os.system runs the command.
        os.system("net send localhost \"Done cycle " + name + '"')
示例#3
0
    def updateIp(self, recur=3):
        """Route all sockets through the local Tor SOCKS5 proxy and refresh
        ``self.ip``, retrying / restarting services while the network is down.

        :param recur: remaining restartTor retries before giving up.
        """
        if not self.alive:
            self.exit()
        socks.socket.setdefaulttimeout(5)
        socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, '127.0.0.1', 9050, True)
        # Monkey-patch the global socket class so all traffic uses Tor.
        socket.socket = socks.socksocket

        try:
            ip = self.getIp()
            # Clearer than the original all([not ip, recur]).
            if not ip and recur:
                print('Error: Network unreachable')
                # Allow up to two network-manager restarts while polling.
                reset_counts = 2
                for _ in range(30):
                    if not self.alive:
                        return
                    ip = self.getIp()
                    if ip:
                        break
                    if reset_counts:
                        reset_counts -= 1
                        shell('service network-manager restart')
                    sleep(1)
                if not ip:
                    self.restartTor(recur - 1)
            if not ip and not recur:
                # Out of retries: fall back to re-establishing the connection.
                self.connection()

            if ip in self.recentIPs.queue:
                # Exit IP seen recently -- ask Tor for a fresh circuit.
                self.restartTor()
            else:
                self.ip = ip
                self.recentIPs.put(ip)

        except Exception:
            # Best-effort: network failures here must not kill the caller.
            # (Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # still propagate.)
            pass
示例#4
0
文件: custom.py 项目: w0bos/onifw
 def __init__(self, installDir, toolDir):
     """Interactively register a custom Python tool: clone its repository
     and record its launch command and completion words."""
     self.installDir, self.toolDir = installDir, toolDir
     link = input("Git repository of the tool (full link): ")
     name = input("Tool name: ")
     ver = input("Python version: ")
     exe = input("Name of the file to launch (w/o extension): ")
     cmds = input("Custom command (leave blank if unsure): ")
     issudo = input(
         "Does the package needs root permissions? [y/N]: ").lower()
     # TODO: ask whether the entry script has a different name
     # (e.g. main.py instead of <projectname>.py)
     cloned = False
     if not cmds:
         # Build the default launch command, optionally under sudo.
         prefix = "sudo " if issudo == "y" else ""
         cmds = "{0}python{1} {2}{3}/{4}.py".format(
             prefix, ver, self.toolDir, name, exe)
     try:
         shell("git clone %s %s%s" % (link, self.toolDir, name))
         cloned = True
     except Exception:
         ErrorHandler(err(), False)
     # BUG FIX: the original set temp = -1 on failure and then tested
     # "if temp:" -- -1 is truthy, so registration ran even after a
     # failed clone.  Register only on success.
     if cloned:
         dictmgr.addWords(self.installDir, [name])
         dictmgr.addCustomWords(self.installDir, name)
         dictmgr.updateConfig(self.installDir, name, cmds)
         print(
             "[*] - You may need to restart onifw in order to use the custom tool."
         )
示例#5
0
def remove_all(installDir, root=1):
    """Wipe everything under <installDir>/tools/ (with sudo when root==1)."""
    toolDir = installDir + 'tools/'
    # To avoid errors: only prefix sudo for the privileged variant.
    prefix = "sudo " if root == 1 else ""
    shell("{0}rm -rf {1}*".format(prefix, toolDir))
示例#6
0
	def display(self):  # Render the map, with or without statistics
		"""Clear the terminal and print the grid; when ``self.stat`` is set,
		append one statistics line next to the first few rows.

		Returns True.
		"""
		shell('clear' if(system() == "Linux") else 'cls')

		if self.stat:  # Build the statistics column when enabled
			# Count live cells across the whole grid (was two manual loops).
			active = sum(sum(cells) for cells in self.__map)

			stats = (
				f"Name       : {self.mapName}",
				f"Dimensions : {self.__dims[0]}x{self.__dims[1]}",
				f"Actives    : {Colors.green if(active < int(self.__cells/2)) else Colors.red}{active}{Colors.end}"
			)

		for i, line in enumerate(self.__map):
			row = ""
			for value in line:
				row += f'{f"{Colors.green}O" if(value) else f"{Colors.cyan}."}{Colors.end} '

			if self.stat and (i < len(stats)):  # Append the stats line
				row += f" {stats[i]}"

			print(row)

		return(True)
示例#7
0
def vpn_connexion_off():
    """Stop anonsurf, fetch the public IP into output.txt and display it."""
    shell("anonsurf stop")
    # The curl output goes to output.txt; the shell() return value was unused.
    shell("curl -X GET https://api.ipify.org > output.txt && clear")
    # The with-block closes the file; the explicit close() inside it was
    # redundant and has been removed.
    with open("output.txt", "r") as r:
        my_ip3 = r.readlines()
    # NOTE(review): my_ip3 is a *list* of lines; Label renders its repr --
    # confirm whether a plain string (r.read().strip()) was intended.
    Label(root, text=my_ip3, fg="black").place(x=110, y=50)
示例#8
0
def vpn_connexion_on():
    """Start anonsurf, fetch the public IP into output.txt and display it."""
    shell("anonsurf start")
    # The curl output goes to output.txt; the shell() return value was unused.
    shell("curl -X GET https://api.ipify.org > output.txt && clear")
    # The with-block closes the file; the explicit close() inside it was
    # redundant and has been removed.
    with open("output.txt", "r") as p:
        my_ip2 = p.readlines()
    # NOTE(review): my_ip2 is a *list* of lines; Label renders its repr --
    # confirm whether a plain string (p.read().strip()) was intended.
    Label(root, text=my_ip2, fg="black").place(x=110, y=50)
示例#9
0
def parse_args(args):
    """Install the package manager named in args[0] and record it in the
    per-platform PMM settings file.

    :param args: CLI arguments; args[0] is the package-manager name.
    """
    package_manager_name = args[0]

    def get_settings_path(current_platform):
        # Per-OS location of PMM's settings.json.
        if current_platform.startswith("linux"):
            return f'{getenv("HOME")}/.config/PMM/settings.json'
        if current_platform.startswith("darwin"):
            return (
                f'/Users/{getenv("USER")}/Library/Application Support/PMM/settings.json'
            )
        if current_platform.startswith("win32"):
            # BUG FIX: the original produced "...APPDATA/\PMM\..." with a
            # stray forward slash before the backslash.
            return f'{getenv("APPDATA")}\\PMM\\settings.json'
        return ""

    url = "https://raw.githubusercontent.com/Charlie-Sumorok/PMM/main/package-managers/data.json"
    package_manager_data = requests.get(url).json()
    package_manager = package_manager_data[package_manager_name]
    with open(get_settings_path(platform), "r+t",
              encoding="utf-8") as settings:
        old_settings_data = json.loads(settings.read())
        # Run the install command matching the currently configured manager.
        shell(package_manager["commands"]["install"][
            old_settings_data["package_manager"]])
        new_settings_data = {
            **old_settings_data,
            args[0]: {
                "enabled": package_manager["enabled"],
                "installed": True
            },
        }
        settings.seek(0)  # move to start
        settings.write(json.dumps(new_settings_data, indent=4))
        # BUG FIX: without truncate(), a shorter rewrite would leave stale
        # trailing bytes from the old JSON in the file.
        settings.truncate()
def deploy_button(channel):
    """Handle a press of the physical deploy button on the given GPIO channel."""
    template = DRY_RUN_DEPLOY_TEMPLATE if DRY_RUN_MODE else DEPLOY_TEMPLATE

    if SAFE_MODE:
        # Debounce: the button must still be held after half a second.
        sleep(0.5)
        if not GPIO.input(channel):
            print(
                'Safe mode is on. Press the deploy button for at least half a second. Button cooldown is 3 seconds'
            )
            return

    print(figlet_format('DEPLOYING'))

    commands = [GIT_LATEST]  # Make sure video-k8s is up to date
    # Collect every environment whose toggle switch is flipped on.
    environments_to_deploy = []
    for pin, env in SWITCHES.items():
        if GPIO.input(pin):
            environments_to_deploy.append([pin, env])

    if not environments_to_deploy:
        # No switches on: run the catch-all script instead.
        shell('bash /home/pi/skedify-deploy-button/roll.sh'
              )  # Hit em with the man
        return

    for _pin, env in environments_to_deploy:
        commands.append(
            template.format(cluster_name=CLUSTER_NAMES[env],
                            environment=env,
                            k8s_folder=VIDEO_K8S_FOLDER))
    shell('&& '.join(commands))

    print(figlet_format('DONE'))
示例#11
0
 def installTor(self):
     """Point apt at the kali-rolling repository and install the tor
     package (requires root; Debian-based systems only).

     NOTE(review): this *overwrites* /etc/apt/sources.list rather than
     appending -- confirm that is intended.
     """
     self.connection()
     if not self.alive: return
     print('Installing Tor ...')
     # The backslash continues the string literal, so the indentation of
     # the next line becomes literal spaces inside the shell command
     # (harmless between && operands).
     shell(
         'echo "deb http://http.kali.org/kali kali-rolling main contrib non-free" > /etc/apt/sources.list \
                                 && apt-get update && apt-get install tor -y && apt autoremove -y'
     )
示例#12
0
def bot_deactivate_vk_posts(pk: int):
    """Deactivate the bot by deleting its cron file.

    :param pk: bot ID
    :type pk: int
    """
    cron_file = '/home/$(whoami)/virtual-person/website/bot/cron/{pk}.cron'.format(pk=pk)
    shell('rm ' + cron_file)
示例#13
0
 def handle_option(option: int):
     """Execute OPTIONS[option - 1] exactly once, running its declared
     dependency first.

     :param option: 1-based option index into OPTIONS.
     """
     if option in handled:
         return
     # Remember the numeric index before rebinding `option` to the dict.
     idx = option
     option = OPTIONS[option - 1]
     if 'depends' in option:
         # NOTE(review): this resolves the option's *own* 'tag' rather
         # than a 'depends' key -- confirm against the OPTIONS schema.
         handle_option(tag2idx[option['tag']] - 1)
     if 'shell' in option:
         shell('bash -c "{}"'.format(option['shell']))
     # BUG FIX: the original appended the option *dict*, but membership
     # above is tested against the integer index, so the memo never
     # matched and options could run repeatedly.
     handled.append(idx)
示例#14
0
 def install_all(self):
     """Install every package in ``pkg``: source-built tools go through
     ``build_tools``, the rest are git-cloned; then update completion
     words with everything that was cloned."""
     wordlist = []
     for tool in pkg:
         temp_install_dir = self.toolDir + tool
         if tool in ["crips", "arachni", "brutex", "revsh", "nmap"]:
             # These need a dedicated build step rather than a plain clone.
             self.build_tools(tool, temp_install_dir)
         else:
             # CONSISTENCY FIX: color.LOGGING added before "..." to match
             # install_some's output colouring.
             print(color.LOGGING + "[+] - Installing " + color.NOTICE +
                   "{}".format(tool) + color.LOGGING + "...")
             shell("git clone -q {0} {1}".format(pkg[tool], temp_install_dir))
             wordlist.append(tool)
     self.add_words(wordlist)
示例#15
0
def bot_activate_vk_posts(time: str, pk: int):
    """Activate the bot by writing a cron entry that runs posts_to_vk.py.

    :param time: cron schedule expression for the bot
    :type time: str
    :param pk: bot ID
    :type pk: int
    """
    activate_venv = '/home/$(whoami)/virtual-person/venv/bin/python'
    # Build the "echo <entry> > <cron file>" command in two readable halves.
    entry = 'echo "{time} $(whoami) {activate_venv} /home/$(whoami)/virtual-person/website/bot/posts_to_vk.py {id}" '.format(
        time=time, id=pk, activate_venv=activate_venv)
    target = '> /home/$(whoami)/virtual-person/website/bot/cron/{id}.cron'.format(id=pk)
    shell(entry + target)
示例#16
0
def ping_host(host, pings):
    """Return True when `host` answers `pings` probes (ping exits 0)."""
    # Windows ping takes -n <count>; unix takes -A -w15 -c <count>.
    if system_name().lower() == "windows":
        parameters = "-n "
    else:
        parameters = "-A -w15 -c "
    command = "ping " + parameters + str(pings) + " " + host + " >" + devnull + " 2>&1"
    return shell(command) == 0
示例#17
0
文件: custom.py 项目: w0bos/onifw
    def __init__(self, installDir, toolDir):
        """Interactively clone and build a custom tool, then register its
        launch command and completion words."""
        self.installDir, self.toolDir = installDir, toolDir
        link = input("Git repository of the tool (full link): ")
        name = input("Tool name: ")
        nb_cmd = int(input("How many commands to build the tool?: "))
        try:
            # BUG FIX: the original "git clone %s %s %s" passed the
            # destination as two separate arguments ("<toolDir> <name>"),
            # which git rejects; join them into one path as the sibling
            # installers do.
            shell("git clone %s %s%s" % (link, self.toolDir, name))
            for i in range(nb_cmd):
                print("[*] - Current directory: %s" % shell("pwd"))
                cmd = input("Custom command: ")
                shell(cmd)
            cmds = input("Launch command: ")
            # NOTE(review): sibling installer passes [name] (a list) to
            # addWords -- confirm which signature dictmgr expects.
            dictmgr.addWords(self.installDir, name)
            dictmgr.addCustomWords(self.installDir, name)
            dictmgr.updateConfig(self.installDir, name, cmds)

        except Exception:
            ErrorHandler(err(), False)
示例#18
0
def main(system=system()):
    """Interactive directory-browser entry point: open a Rep from argv
    (-l/--load <name>) or prompt for its name, then loop dispatching
    single-letter commands to it until 'Q'/'q' quits.

    NOTE(review): the ``system=system()`` default is evaluated once at
    import time; callers may pass their own platform string.
    """
    if (len(argv) > 1):
        if (argv[1] in ("-l", "--load")):
            try:
                repo = Rep(argv[2])

            except Exception:
                print(f"{Icons.warn}Insérer le nom du fichier en argument")
                return (False)
        # NOTE(review): when argv[1] is any *other* flag, ``repo`` is never
        # bound and the command loop below raises NameError -- confirm
        # whether unknown flags should fall through to the prompt instead.

    else:
        # No CLI arguments: clear the screen and prompt for the name.
        shell("clear" if (system == "Linux") else "cls")

        path = str(input(f"Entrer le nom du répertoire: {Colors.green}"))
        print(Colors.end)
        repo = Rep(path)

    while (True):
        try:
            # Only the first character of the line is used as the command.
            entry = str(input(f"\n> {Colors.cyan}"))[0]
            print(Colors.end)

            # Dispatch table: upper- and lower-case letters accepted.
            commands = (
                ("Cc", lambda: repo.insert()),
                ("Dd", lambda: repo.rem()),
                ("Aa", lambda: repo.sortOnceByName()),
                ("Gg", lambda: repo.sortAll()),
                ("?", lambda: repo.menu()),
            )

            for command in commands:
                if (entry in command[0]):
                    command[1]()

            if (entry in "Qq"):
                print(f"{Icons.info}Fermeture du répertoire")
                break

        except Exception:
            # Swallows bad input (e.g. empty line -> IndexError on [0]).
            print(f"{Icons.warn}Céléstin arrête tes carabistouilles !")

    print("Au revoir kheyou ;)")

    return (True)
示例#19
0
def uninstall(installDir, cmd, root=0):
    """Remove the tools named in `cmd` from <installDir>/tools and scrub
    their names from the completion dictionary.

    :param installDir: install root (trailing slash expected by callers).
    :param cmd: iterable of tool names to remove.
    :param root: when non-zero, delete with sudo.
    """
    print("[*] - Removing folder")
    rm_fmt = "rm -rf {0}tools/{1}" if root == 0 else "sudo rm -rf {0}tools/{1}"
    for i in cmd:
        shell(rm_fmt.format(installDir, i))
    print(color.LOGGING + "[*] - Cleaning dictionnary..." + color.END)
    # BUG FIX: the original inner loop appended a line once per tool name
    # it did NOT contain, duplicating most lines on every uninstall.  Keep
    # a line only when it mentions none of the removed tools.
    dict_path = "{}data/dict.txt".format(installDir)
    with open(dict_path) as f:
        out = [line for line in f if not any(i in line for i in cmd)]
    with open(dict_path, 'w') as f:
        f.writelines(out)
示例#20
0
文件: custom.py 项目: w0bos/onifw
 def __init__(self, installDir, toolDir):
     """Interactively clone/build a non-Python custom tool and register
     its launch command, completion word and custom-tool entry."""
     # Maps the user-facing language choice to the interpreter/launcher.
     lang_dict = {
         "perl": "perl",
         "ruby": "ruby",
         "go": "go",
         "java-jar": "jar",
         "java": "java",
     }
     self.installDir, self.toolDir = installDir, toolDir
     print(color.OKBLUE + "Available languages:")
     for i in lang_dict.keys():
         print(i)
     print(color.END)
     lang = input("Select lang: ")
     link = input("Git repository of the tool (full link): ")
     name = input("Tool name: ")
     name_exe = input("Name of the main file (w/ entension): ")
     nb_cmd = int(input("How many commands to build the tool?: "))
     try:
         shell("git clone %s %s%s" % (link, self.toolDir, name))
         for i in range(nb_cmd):
             print("[*] - Current directory: %s" % shell("pwd"))
             cmd = input("Custom command: ")
             shell(cmd)
         # BUG FIX: the original embedded "name = " inside cmds AND wrote
         # "{name} = {cmds}" below, producing "tool = tool = ..." in the
         # rc file.  cmds now holds only the command itself.  Also added
         # the missing space between the launcher and the file name in
         # the java branch.
         if lang == "java":
             cmds = "cd {0}{1} && {2} {3}".format(toolDir, name,
                                                  lang_dict[lang], name_exe)
         else:
             # NOTE(review): "{toolDir}{name}{name_exe}" concatenates the
             # directory and file without a separator -- confirm whether a
             # "/" between name and name_exe is needed.
             cmds = "{0} {1}{2}{3}".format(lang_dict[lang], toolDir, name,
                                           name_exe)
         # The with-blocks close the files; explicit close() calls were
         # redundant and have been removed.
         with open("{}onirc".format(self.installDir), "a") as f:
             f.write("{0} = {1}\n".format(name, cmds))
         with open("{}data/dict.txt".format(self.installDir), "a") as f:
             f.write(name + '\n')
         with open("{}data/ctools.txt".format(self.installDir), "a") as f:
             f.write(name + '\n')
     except Exception:
         ErrorHandler(err(), False)
示例#21
0
def jump(now, next_jump):
    """Issue an adb swipe whose duration is proportional to the distance
    between `now` and `next_jump` (scaled by the global factor `k`)."""
    dx = abs(now[0] - next_jump[0])
    dy = abs(now[1] - next_jump[1])
    # Euclidean distance to the next landing point.
    length = sqrt(dx ** 2 + dy ** 2)
    millisecond = round(length * k)

    adb_command = "adb shell input swipe {} {} {} {} {}".format(
        now[0], now[1], now[0] + 10, now[1] + 10, millisecond)
    if shell(adb_command) != 0:
        print("adb swipe执行失败")
        exit(0)
示例#22
0
    def target_pileup_from_mut(mut_file, base_file, bam, chrom):
        '''
        piles up the mutation list in the tumor bam

        Writes <base_file>.bed from mut_file, runs samtools mpileup on the
        closure-bound tumor_bam restricted to `chrom`, pipes the result
        through the cleanpileup / pile2count helpers into
        <base_file>.matrix, removes the bed file and returns the matrix
        file path.

        NOTE(review): the `bam` parameter is unused -- the pileup reads
        the enclosing scope's tumor_bam instead; confirm which is meant.
        '''

        # bed file can contain all chromosomes because chrom restriction comes with the -r parameter
        bed_file = f"{base_file}.bed"
        # create the bed file for mpileup
        shell(f"{csv2bed} < {mut_file} > {bed_file}")

        # # if I want to restrict chromosome in file:
        # mut_chr_file = f"{base_file}.csv"
        # mut_df.to_csv(mut_chr_file, sep='\t', index=False)
        # # create the bed file for mpileup from the mutation file
        # shell(f"{csv2bed} < {mut_chr_file} > {bed_file}")

        # do the pileup into the matrix file
        matrix_file = f"{base_file}.matrix"
        # MAPQ / base-quality thresholds come from the EBparams mapping.
        pileup_cmd = f"samtools mpileup -B -q {EBparams['MAPQ']} -Q {EBparams['Q']}"
        pileup_cmd += f" -l {bed_file} -r {chrom} {tumor_bam}"
        # columns 1,2,5: chromosome, position, read bases.
        pipe_cmd = f"{pileup_cmd} | cut -f 1,2,5 | {cleanpileup} | {pile2count} > {matrix_file}"
        show_output(f"Piling up tumor bam {tumor_bam}", color='normal')
        # do the pileup to matrix_file
        show_command(pipe_cmd, multi=False)
        shell(pipe_cmd)
        # cleanup
        shell(f"rm -f {bed_file}")
        show_output(
            f"Pileup matrix for chrom {chrom} of {tumor_bam} completed. Merging with cache file...",
            color='normal'
        )
        return matrix_file
示例#23
0
    def __init__(self, installDir):
        """Compare the local onifw version against the GitHub master branch
        and offer to git-pull when an update is available."""
        self.installDir = installDir
        if not self.check_branch():
            try:
                with open("{}data/version.txt".format(installDir)) as f:
                    local_version = version.parse(
                        f.readlines()[0].rstrip("\n\r"))
                # (redundant f.close() after the with-block removed)

                latest_version = check_output(
                    "curl -s https://raw.githubusercontent.com/w0bos/onifw/master/src/data/version.txt",
                    shell=True).decode("utf-8").strip('\r\n')
                late = version.parse(latest_version)
                if late > local_version:
                    ans = input(
                        color.NOTICE +
                        "[*] - A new version is available\nDo you wish to install the new update? [y/N] :"
                        + color.END)
                    if ans.lower() in ["yes", "y"]:
                        # Won't wipe old install
                        shell("cd {} && git pull".format(installDir))
                    else:
                        print("[*] - Update aborted")

                elif late == local_version:
                    print(
                        color.OKGREEN +
                        "[*] - You're already running the latest version of onifw"
                        + color.END)
                elif late < local_version:
                    print(color.BOLD + color.IMPORTANT +
                          "[+] - You are running an alpha version of onifw" +
                          color.END)
                else:
                    # Defensive fallback; unreachable for totally ordered
                    # version objects.
                    print(color.WARNING + "[!] - Unknown error" + color.END)

                shell("rm -rf {}/temp".format(installDir))
            except Exception:
                # Narrowed from a bare except so SystemExit/KeyboardInterrupt
                # still propagate; ErrorHandler records the failure.
                ErrorHandler(err(), False, True)
示例#24
0
    def perform_centrifuge_cycle(self, name, cycle):
        """Run a centrifuge *cycle* of newline-separated steps shaped like
        "<S>rpm for <T> seconds", refusing to start while the door is open.

        Returns "door not closed" when the door interlock blocks the cycle.
        """
        # BUG FIX: is_door_closed() returns a bit (1/0), never the string
        # "no", so the original guard could never trigger.  Refuse to start
        # unless the door reports closed (assumes 1 == closed -- confirm).
        if not self.is_door_closed():
            return "door not closed"
        self._cycle_running = True
        for step in cycle.split("\n"):
            # "<S>rpm for <T> seconds": strip the "rpm" / " seconds" suffixes.
            s = int(step.split(" for ")[0][:-3])
            t = int(step.split(" for ")[1][:-8])
            if s > self._speed_cap:
                # Silently skip steps above the hardware speed cap.
                continue
            self.speed(s)
            # Spin-wait until the measured speed reaches the target
            # (equivalent to the original "not got_speed > target_speed").
            self.target_speed = s
            while self.got_speed <= self.target_speed:
                self.getSpeed()
            # Hold the speed for the requested number of seconds.
            start_wait = datetime.now()
            while (datetime.now() - start_wait).total_seconds() < t:
                pass

        self._cycle_running = False
        # BUG FIX: os.shell does not exist -- os.system runs the command.
        os.system("net send localhost \"Done cycle " + name + '"')
示例#25
0
    def perform_centrifuge_cycle(self, name, cycle):
        """Run a centrifuge *cycle* of newline-separated steps shaped like
        "<S>rpm for <T> seconds", refusing to start while the door is open.

        Returns "door not closed" when the door interlock blocks the cycle.
        """
        # BUG FIX: is_door_closed() returns a bit (1/0), never the string
        # "no", so the original guard could never trigger.  Refuse to start
        # unless the door reports closed (assumes 1 == closed -- confirm).
        if not self.is_door_closed():
            return "door not closed"
        self._cycle_running = True
        for step in cycle.split("\n"):
            # "<S>rpm for <T> seconds": strip the "rpm" / " seconds" suffixes.
            s = int(step.split(" for ")[0][:-3])
            t = int(step.split(" for ")[1][:-8])
            if s > self._speed_cap:
                # Silently skip steps above the hardware speed cap.
                continue
            self.speed(s)
            # Spin-wait until the measured speed reaches the target
            # (equivalent to the original "not got_speed > target_speed").
            self.target_speed = s
            while self.got_speed <= self.target_speed:
                self.getSpeed()
            # Hold the speed for the requested number of seconds.
            start_wait = datetime.now()
            while (datetime.now() - start_wait).total_seconds() < t:
                pass

        self._cycle_running = False
        # BUG FIX: os.shell does not exist -- os.system runs the command.
        os.system("net send localhost \"Done cycle " + name + '"')
示例#26
0
def compile(file: str) -> None:
    """
    Compile a file using latexmk and xelatex. The output PDF file is placed in the same directory
    as the source file.

    Also removes auxiliary files produced by the latex compiler when finished.

    Parameters
    ----------
    file : str
        The path to the file to compile

    Raises
    ------
    RuntimeError:
        If xelatex or latexmk cannot be found.
    """
    # NOTE: this function shadows the builtin compile(); renaming would
    # break callers, so it is kept.
    log.info('Current path variable: {}'.format(os.getenv('PATH')))
    if which('latexmk') is None or which('xelatex') is None:
        raise RuntimeError(
            'xelatex or latexmk could not be found. Leaving Latex source files as-is.'
        )

    output_dir = path.split(file)[0]
    # Escape spaces for the shell; latexmk writes the PDF next to the source.
    command = r'/usr/local/texlive/2020/bin/x86_64-linux/latexmk -xelatex -output-directory={} {}'.format(
        output_dir.replace(' ', '\\ '), file.replace(' ', '\\ '))
    log.info('Running shell command {}'.format(command))
    # shell() appears to be os.popen-like: .read() yields the output text.
    code = shell(command).read()
    if code != '0':
        log.error('latexmk failed. Check the log file for errors')
    else:
        log.info('PDF file successfully generated. ')

    log.info('Cleaning up')

    # Delete auxiliary files.  BUG FIX: the original cleanup loop reused
    # the name `file`, clobbering the parameter -- renamed to aux_file.
    basename = path.splitext(path.basename(file))[0]
    to_delete = [
        path.join(output_dir, '{}.log'.format(basename)),
        path.join(output_dir, '{}.aux'.format(basename)),
    ]
    for aux_file in to_delete:
        log.debug('Deleting file {}'.format(path.basename(aux_file)))
        remove(aux_file)
示例#27
0
    def runJDFTx(self, inputfile):
        """ Runs a JDFTx calculation """
        # Scratch directory with a random numeric suffix to avoid collisions.
        workdir = 'temp.%s' % (int(round(100000 * random.random())))
        shell('mkdir %s' % (workdir))

        # Feed the input deck to the executable on stdin inside the scratch dir.
        shell('cd %s && echo \'%s\' | %s -o temp.out' %
              (workdir, inputfile, self.executable))

        # Harvest energy and forces from the output files before cleanup.
        self.E = self.__readEnergy('%s/temp.Ecomponents' % (workdir))
        self.Forces = self.__readForces('%s/temp.force' % (workdir))

        shell('rm -rf %s' % (workdir))
示例#28
0
def main(s):
    """Normalize a fastq input to fastq.gz at the output path.

    `s` is a snakemake-style object carrying .input (list of paths),
    .output and .threads.
    """
    # Renamed locals: the original `input` shadowed the builtin.
    in_files = s.input
    out_file = s.output
    threads = s.threads

    extension = os.path.splitext(in_files[0])[1]
    if extension == '.fastq':
        # compress fastq as fastq.gz into workdir
        shell(f"pigz -5 -p {threads} {in_files} > {out_file}")
    elif extension == '.gz':
        show_output(f"Creating symlink for file {in_files}")
        # create shortcut to fastq.gz in workdir/fastq
        shell(f"ln -s {in_files} {out_file}")
    elif extension == '.bz2':
        show_output(f"file extension {extension} --> unzipping with bzcat")
        # uncompress fastq.b2 and recompress to fastq.gz in workdir/fastq
        shell(f"bzcat {in_files} | pigz -5 -p {threads} > {out_file}")
示例#29
0
	def setUp(self):
		"""Create the fixture directory and seed it with a file containing
		the shell glob expansion of the current directory."""
		for command in ("mkdir _test_dir_", "echo * > _test_dir_/_test_file_"):
			shell(command)
示例#30
0
 def stopTor(self):
     """Stop the system Tor service (requires service-management rights)."""
     shell('service tor stop')
示例#31
0
 def restartTor(self, num=3):
     """Restart the Tor service, give it a moment to settle, then refresh
     the exit IP.

     :param num: remaining retries forwarded to updateIp.
     """
     shell('service tor restart')
     # Brief pause so the restarted service can build a circuit.
     sleep(1.5)
     self.updateIp(num)
示例#32
0
def main(paths, paths_l, nw_path, lesses, files, Lakefile, config,
         ignore, quite, force, *args, **kwargs):
    """Compile the collected .less files (Python 2 script).

    Unless ``quite`` (sic) is set, prints a banner; unless ``force`` is
    set, lists the files and asks for confirmation; compiles each (src,
    dst) pair not in ``ignore`` via ``less()``; finally raises
    KeyboardInterrupt as its normal exit path (handled by the caller --
    TODO confirm).
    """
    if not quite:
        shell('clear')
        print bcolors.HEADER + __doc__.split('\n')[1] + bcolors.ENDC
        print "\n".join(__doc__.split('\n')[2:])

        print "I'm presuming the files are at %s " % "".join(
            [(path + "/*.less") for path in paths_l])

        print "and files at sub directory e.g. %s" % paths_l[0] + "/imports/*.less"
        print "are to be ignored."

        if Lakefile:
            print bcolors.OKBLUE + "Ha!Ha! that's a Lakefile I see " +\
                "well those files shell be ignored!" + bcolors.ENDC

        print
        print
        print

    if not force:
        print bcolors.OKGREEN + "final list to compile is:" + bcolors.ENDC
        for old_file, nw_file in files:
            print "    - \033[94m%s\033[0m -> \033[92m%s\033[0m" % (old_file,
                                                                    nw_file)
        confirm()

    # Ignore Ctrl-C while compiling; the previous handler is restored below.
    s = signal.signal(signal.SIGINT, signal.SIG_IGN)

    if not quite:
        print
        print
        print

    # i: ignored count, c: compiled count, n: progress counter.
    i = 0
    c = 0
    compiled = []
    ignored = []
    total_ops = float(len(files) + 1)
    n = 0

    for f in files:
        if f not in ignore:
            n += 1
            if not quite: center("compiling %r \n" % f[1] + get_loader(n / total_ops))
            less(*f)
            compiled.append(f)
            c += 1
        else:
            n += 1
            if not quite: center("ignoring %s \n" % f[0] + get_loader(n / total_ops))
            ignored.append(f)
            i += 1

    # Build the summary message shown on the final screen.
    p = ["'" + path + "'" for path in paths_l]
    msg = "\n"

    msg += "    \033[92m compiled %r files\033[0m from %s" % (
        c, "".join(p) + (":" if c > 0 else ""))
    msg += "\n"
    for f in compiled:
        msg += "        - \033[94m%s\033[0m -> \033[92m%s\033[0m" % f
        msg += "\n"

    msg += "\n"

    msg += "    \033[94mignored %r files\033[0m from %s" %\
        (i, "".join(p) + (":" if i > 0 else ""))
    for f in ignored:
        msg += "        - \033[94m%s\033[0m" % f[0]
        msg += "\n"

    i += 1

    if not quite:
        center("DONE!\n" + get_loader(1) + msg)
        # Restore the original SIGINT handler before the deliberate raise.
        signal.signal(signal.SIGINT, s)
        if not force:
            confirm(msg=False)
        raise KeyboardInterrupt
示例#33
0
def run_eb(table, tumor_bam, output, pon_list, chrom, log, threads, EBparams, full_output,
           cleanpileup,
           csv2bed,
           pon2cols,
           pile2count,
           matrix2EBinput,
           makeponlist
    ):
    '''
    master function to start eb_computation

    Per-chromosome pipeline:
      1. pile up tumor_bam together with the panel-of-normals (PoN) list
         via samtools mpileup and convert the pileup to a count matrix
      2. merge the matrix with the mutation list from `table`
      3. compute EBscores (multithreaded) and attach PoN base info
      4. write the annotated table to output[0]

    NOTE(review): cleanpileup/csv2bed/pon2cols/pile2count/matrix2EBinput/
    makeponlist look like paths to external helper scripts invoked through
    shell() -- confirm against the snakemake rules.
    '''
    # ############## LOAD DATA ###############################
    show_output(f"Computing EBscore for chrom", color='normal')

    # get the sceleton mutation file for that chromosome
    mut_df = pd.read_csv(table, sep='\t', index_col=False, header=None, names=['Chr', 'Start', 'End', 'Ref', 'Alt', 'somatic_status', 'TR1', 'TR1+', 'TR2', 'TR2+', 'NR1', 'NR1+', 'NR2', 'NR2+', 'somaticP', 'variantP']).query('Chr == @chrom').iloc[:, :5]
    mut_cols = list(mut_df.columns)
    # set base_name for intermediate files
    base_file = output[0].replace(".EB", "")

    # ############## PILEUP --> MATRIX FILE ##################

    # bed file can contain all chromosomes because chrom restriction comes with the -r parameter
    bed_file = f"{base_file}.bed"
    # create the bed file for mpileup
    shell(f"{csv2bed} < {table} > {bed_file}")

    # # if I want to restrict chromosome in file:
    # mut_chr_file = f"{base_file}.csv"
    # mut_df.to_csv(mut_chr_file, sep='\t', index=False)
    # # create the bed file for mpileup from the mutation file
    # shell(f"{csv2bed} < {mut_chr_file} > {bed_file}")

    # create the pon_list containing the tumor-bam as first file
    sample_list = f"{base_file}.pon"
    # makeponlist removes the sample itself from list if it is part of PoN
    shell(f"{makeponlist} {tumor_bam} {pon_list} {sample_list}")

    show_output(f"Piling up {chrom} of {tumor_bam} with Pon List.", color='normal')
    shell(f"cat {sample_list}")
    # do the pileup into the matrix file
    matrix_file = f"{base_file}.matrix"
    # MAPQ / base-quality thresholds come from the EBparams mapping.
    pileup_cmd = f"samtools mpileup -B -q {EBparams['MAPQ']} -Q {EBparams['Q']}"
    pileup_cmd += f" -l {bed_file} -r {chrom} -b {sample_list}"
    # cut -f $({pon2cols}< {sample_list}) creates a cut command only including the desired

    pipe_cmd = f"{pileup_cmd} | cut -f $({pon2cols} < {sample_list}) | {cleanpileup} | {pile2count} > {matrix_file}"
    # do the pileup to matrix_file
    show_command(pipe_cmd, multi=False)
    shell(pipe_cmd)
    # cleanup
    shell(f"rm {bed_file} {sample_list}")

    # check if matrix_file has input
    if not os.path.getsize(matrix_file):
        # create empty file
        open(output[0], 'a').close()
        show_output(f"Pileup for {chrom} of {tumor_bam} was empty! Created empty file {output[0]}", color='warning')
    else:
        show_output(f"Pileup matrix for chrom {chrom} of {tumor_bam} completed.", color='normal')
        # ################ MERGE INTO MUTFILE ######################
        # change mutation positions for deletions in mutation file
        mut_df.loc[mut_df['Alt'] == "-", 'Start'] = mut_df['Start'] - 1
        # read matrix file into df
        matrix_df = pd.read_csv(matrix_file, sep='\t', index_col=False)
        # merge
        mut_matrix = mut_df.merge(matrix_df, on=['Chr', 'Start'], how='inner')
        # reset deletion positions
        mut_matrix.loc[mut_matrix['Alt'] == "-", 'Start'] = mut_matrix['Start'] + 1

        # ####### if using matrix2EBinput.mawk #######################
        # write to file
        mutmatrix_file = f"{base_file}.mutmatrix"
        mut_matrix.to_csv(mutmatrix_file, sep='\t', index=False)

        # convert mutmatrix to direct EBinput
        EB_matrix_input_file = f"{base_file}.EB.matrix"
        shell(f"cat {mutmatrix_file} | {matrix2EBinput} > {EB_matrix_input_file}")
        
        # load in the EB.matrix file
        eb_matrix = pd.read_csv(EB_matrix_input_file, sep='\t')

        # multithreaded computation
        EB_df = compute_matrix2EB_multi(eb_matrix, EBparams['fitting_penalty'], threads)

        # add EBscore to columns
        mut_cols.append('EBscore')

        # get the pon_matrix containing the Pon coverages in Alt and Ref
        pon_matrix = get_pon_bases(eb_matrix)
        # transfer PoN-Ref and PoN-Alt to EB_df
        EB_df[['PoN-Ref', 'PoN-Alt']] = pon_matrix[['PoN-Ref', 'PoN-Alt']]
        mut_cols += ['PoN-Ref', 'PoN-Alt']

        # ###### add the full output ##########
        if full_output:
            # condense base info
            print('full_output')
            base_cols = list("AaGgCcTtIiDd")
            col_name = "|".join(base_cols)
            # convert base coverage to str
            for ch in base_cols:
                # take the letter info from the mut_matrix which is not yet condensated
                # str.replace removes the tumor bases
                EB_df[ch] = mut_matrix[ch].map(str).str.replace(r'^[0-9]+\|', "")
            # condense base info into col "A|a|G|g|C|c|T|t|I|i|D|d"
            EB_df[col_name] = EB_df[base_cols].apply(lambda row: "-".join(row), axis=1)
            # add "A|a|G|g|C|c|T|t|I|i|D|d" to columns
            mut_cols.append(col_name)
        # rm unnecessary columns
        EB_df = EB_df[mut_cols]

        # ######### WRITE TO FILE ##############################################

        EB_file = output[0]
        EB_df.to_csv(EB_file, sep='\t', index=False)

        # cleanup
        shell(f"rm {matrix_file} {EB_matrix_input_file}")  # {mutmatrix_file}
        show_output(f"Created EBscore for chrom {chrom} of {tumor_bam} and written to {output[0]}", color='success')
示例#34
0
文件: picard.py 项目: vishnubob/bones
 def __call__(self):
     """Build and run the Picard command line from this mapping's items.

     Each (key, value) pair becomes one "KEY=VALUE" argument appended
     after ``self.Command``.
     """
     # BUG FIX: the original comprehension never interpolated kv, so every
     # argument was the literal string "%s=%s".
     args = ["%s=%s" % kv for kv in self.items()]
     args = str.join(' ', args)
     cmd = "%s %s" % (self.Command, args)
     # BUG FIX: os.shell does not exist -- os.system runs the command.
     os.system(cmd)
示例#35
0
def run_eb_from_cache(table, tumor_bam, output, pon_list, chrom, log, threads, EBparams, full_output,
           cleanpileup,
           csv2bed,
           pile2count,
           matrix2EBinput,
           reorder_matrix
    ):
    '''
    Compute the EBscore for the mutations of one chromosome using a
    precomputed EBcache and write the annotated mutation table to
    output[0].

    Two paths are taken depending on whether the tumor sample's matched
    normal is part of the Panel of Normals (PoN):
      * sample in PoN: fall back to the cached pileup matrix, remove the
        sample's bases via reorder_matrix, and run the full (uncached)
        EB computation.
      * sample not in PoN: pile up the tumor bam at the mutation
        positions and merge with the compressed AB cache for the fast
        cached EB computation.

    NOTE(review): several names used below are not parameters and must
    come from the enclosing scope (presumably a snakemake rule context):
    mut_file, AB_cache_file, matrix_cache_file, config -- plus helpers
    defined elsewhere in the project (pd, shell, show_output,
    show_command, compute_matrix2EB_multi, compute_AB2EB_multi,
    get_pon_bases, get_sample_pos).
    NOTE(review): the parameters table, log and full_output are never
    read below; the full-output switch actually consulted is
    config['EBFilter']['full_pon_output'] -- confirm this is intended.
    '''

    def target_pileup_from_mut(mut_file, base_file, bam, chrom):
        '''
        piles up the mutation list in the tumor bam
        '''
        # Returns the path to the "<base_file>.matrix" pileup count file.
        # NOTE(review): the bam parameter is unused -- the pileup command
        # below uses the enclosing tumor_bam instead; confirm intended.

        # bed file can contain all chromosomes because chrom restriction comes with the -r parameter
        bed_file = f"{base_file}.bed"
        # create the bed file for mpileup
        shell(f"{csv2bed} < {mut_file} > {bed_file}")

        # # if I want to restrict chromosome in file:
        # mut_chr_file = f"{base_file}.csv"
        # mut_df.to_csv(mut_chr_file, sep='\t', index=False)
        # # create the bed file for mpileup from the mutation file
        # shell(f"{csv2bed} < {mut_chr_file} > {bed_file}")

        # do the pileup into the matrix file
        matrix_file = f"{base_file}.matrix"
        # -B disables BAQ; MAPQ/Q thresholds come from EBparams
        pileup_cmd = f"samtools mpileup -B -q {EBparams['MAPQ']} -Q {EBparams['Q']}"
        pileup_cmd += f" -l {bed_file} -r {chrom} {tumor_bam}"
        # keep only Chr/Start/read-bases, then clean and count the bases
        pipe_cmd = f"{pileup_cmd} | cut -f 1,2,5 | {cleanpileup} | {pile2count} > {matrix_file}"
        show_output(f"Piling up tumor bam {tumor_bam}", color='normal')
        # do the pileup to matrix_file
        show_command(pipe_cmd, multi=False)
        shell(pipe_cmd)
        # cleanup
        shell(f"rm -f {bed_file}")
        show_output(
            f"Pileup matrix for chrom {chrom} of {tumor_bam} completed. Merging with cache file...",
            color='normal'
        )
        return matrix_file

    # ############## LOAD DATA ###############################
    show_output(f"Computing EBscore for chrom {chrom} of {tumor_bam} using EBcache {AB_cache_file}", color='normal')

    # get the mutation file for the chromosome
    # restrict to this chromosome and keep only the first five columns
    # (Chr, Start, End, Ref, Alt)
    mut_df = pd.read_csv(mut_file, sep='\t', index_col=False, header=None, names=['Chr', 'Start', 'End', 'Ref', 'Alt', 'somatic_status', 'TR1', 'TR1+', 'TR2', 'TR2+', 'NR1', 'NR1+', 'NR2', 'NR2+', 'somaticP', 'variantP']).query('Chr == @chrom').iloc[:, :5]
    mut_cols = list(mut_df.columns)

    # check for empty df
    if mut_df.empty:
        # nothing to score -- still emit a (header-only) output file
        EB_df = pd.DataFrame(columns=mut_cols)
        EB_df.to_csv(output[0], sep='\t', index=False)
        show_output(f"No mutations for {chrom} in mutation list! Writing empty file to {output[0]}", color='warning')
    else:
        # set base_name for intermediate files
        base_file = output[0].replace(".cachedEB", "")

        # ############## LOAD PILEUP MATRIX CACHE AND MERGE INTO MUT_DF #####
        # change mutation positions for deletions in mutation file
        # (deletions are recorded one base downstream of the pileup
        # coordinate, so shift Start by -1 for the merge -- TODO confirm)
        mut_df.loc[mut_df['Alt'] == "-", 'Start'] = mut_df['Start'] - 1
        show_output(f"Loading compressed matrix cache file {matrix_cache_file}", color='normal')
        # load in the target matrix file as df
        cache_matrix_df = pd.read_csv(matrix_cache_file, sep='\t', index_col=False, compression='gzip')
        # merge
        mut_matrix = mut_df.merge(cache_matrix_df, on=['Chr', 'Start'], how='inner')
        # reset deletion positions
        mut_matrix.loc[mut_matrix['Alt'] == "-", 'Start'] = mut_matrix['Start'] + 1
        show_output(f"Loaded and merged into mutation list", color='normal')

        # ############### CHECK IF SAMPLE IN PON ####################
        # if sample_inpon == 0, then sample is not in PoN
        # else, pon matrix has to be acquired from cache and used in EBscore
        sample_in_pon = get_sample_pos(pon_list, tumor_bam)

        # ########################################### CACHE FROM MATRIX #####################################
        if sample_in_pon:
            show_output(
                f"Corresponding normal sample for {tumor_bam} has been found in PoNs! EBcache cannot be used!",
                color='warning'
            )
            show_output(f"Falling back to cached matrix file..", color='normal')
            # EBcache cannot be used directly

            # ######### REMOVE SAMPLE BASES FROM MATRIX FILE

            # get the cached matrix file and reorder sample bases to first position to create valid mutmatrix
            # reorder_matrix takes position of sample in pon_list as argument
            # if position of tumor bam in pon == 1, everything is already fine
            mutmatrix_file = f"{base_file}.mutmatrix"
            if sample_in_pon > 1:
                prematrix_file = f"{base_file}.prematrix"
                mut_matrix.to_csv(prematrix_file, sep='\t', index=False)

                # row is 0-based --> sample_in_pon + 1
                # NOTE(review): the comment above says "+ 1" but the code
                # passes sample_in_pon - 1 -- verify which is correct
                reduce_matrix_cmd = f"cat {prematrix_file} | {reorder_matrix} {sample_in_pon - 1} > {mutmatrix_file}"
                show_command(reduce_matrix_cmd, multi=False)
                shell(reduce_matrix_cmd)
                # cleanup
                shell(f"rm {prematrix_file}")
            else:
                # tumor sample already in the right position
                mut_matrix.to_csv(mutmatrix_file, sep='\t', index=False)
            show_output(f"Retrieving target data from cached matrix", color='normal')

            # # CONTINUE LIKE UNCACHED EBscore
            # convert mutmatrix to direct EBinput
            EB_matrix_input_file = f"{base_file}.EB.matrix"
            EBinput_cmd = f"cat {mutmatrix_file} | {matrix2EBinput} > {EB_matrix_input_file}"
            show_command(EBinput_cmd, multi=False)
            shell(EBinput_cmd)
            # load in the EB.matrix file
            eb_matrix = pd.read_csv(EB_matrix_input_file, sep='\t')
            print('Start computation file')
            # multithreaded computation
            # passing attempts to threads
            EB_df = compute_matrix2EB_multi(eb_matrix, EBparams['fitting_penalty'], threads)
            print('Computation finished')
            # get the pon_matrix containing the Pon coverages in Alt and Ref
            pon_matrix = get_pon_bases(eb_matrix)
        # ########################################### CACHE FROM ABcache ###########################
        else:
            # ############## TARGET PILEUP --> MATRIX FILE ##################
            tumor_matrix_file = target_pileup_from_mut(mut_file, base_file, tumor_bam, chrom)
            # check if matrix_file has input
            # if not os.path.getsize(tumor_matrix_file):
            #     # create empty file
            #     EB_df = mut_df
            #     EB_df['EBscore'] = 0
            #     has_pileup = False

            # else:  # has input
            # has_pileup = True
            # reloading the target pileup into pileup_df
            # use dtype to ensure str encoding of chromosome columns
            pileup_df = pd.read_csv(tumor_matrix_file, sep='\t', dtype={'Chr': str, 'Start': int}, index_col=False)

            show_output(f"Loading compressed AB cache file {AB_cache_file}", color='normal')
            cache_df = pd.read_csv(AB_cache_file, compression='gzip', sep='\t')

            # attach the cached AB parameters to the tumor pileup rows
            pileAB_df = pileup_df.merge(cache_df, on=['Chr', 'Start'])
            # change coords for merge with start and merge into mut_df for Ref
            mut_df.loc[mut_df['Alt'] == "-", 'Start'] = mut_df['Start'] - 1
            pileAB_df = mut_df.merge(pileAB_df, on=['Chr', 'Start'])
            pileAB_df.loc[pileAB_df['Alt'] == "-", 'Start'] = pileAB_df['Start'] + 1

            # save for debugging
            # pileAB_file = f"{base_file}.pileAB"
            # pileAB_df.to_csv(pileAB_file, sep='\t', index=False)
            show_output(
                f"Pileup matrix for for chrom {chrom} of {tumor_bam} merged with AB matrix." +
                " Going on with EB computation...",
                color='normal'
            )

            # ############## EBSCORE COMPUTATION  ########
            # multithreaded computation
            EB_df = compute_AB2EB_multi(pileAB_df, threads)

            # convert matrix file to EB_input for getting PoN-Ref and Pon-Alt
            mutmatrix_file = f"{base_file}.mutmatrix"
            mut_matrix.to_csv(mutmatrix_file, sep='\t', index=False)
            # do the conversion
            EB_matrix_input_file = f"{base_file}.EB.matrix"
            convert_cmd = (f"cat {mutmatrix_file} | {matrix2EBinput} > {EB_matrix_input_file}")
            show_command(convert_cmd)
            shell(convert_cmd)

            # load in the EB.matrix file
            eb_matrix = pd.read_csv(EB_matrix_input_file, sep='\t')

            # get the pon_matrix containing the Pon coverages in Alt and Ref
            # tumor sample is not in PoN --> no removal neccessary
            pon_matrix = get_pon_bases(eb_matrix, remove_sample=False)

            # cleanup
            shell(f"rm -f {tumor_matrix_file}")

        # add EBscore to columns
        mut_cols.append('EBscore')

        # transfer PoN-Ref and PoN-Alt from pon_matrix to EB_df
        EB_df[['PoN-Ref', 'PoN-Alt']] = pon_matrix[['PoN-Ref', 'PoN-Alt']]
        mut_cols += ['PoN-Ref', 'PoN-Alt']

        # ###### add the full output ##########
        if config['EBFilter']['full_pon_output']:
            # NOTE(review): reads config flag rather than the full_output
            # parameter -- confirm which switch should win
            # condense base info
            base_cols = list("AaGgCcTtIiDd")
            col_name = "|".join(base_cols)
            # convert base coverage to str
            for ch in base_cols:
                # take the letter info from the mut_matrix which is not yet condensated
                # str.replace removes the tumor bases
                EB_df[ch] = mut_matrix[ch].map(str).str.replace(r'^[0-9]+\|', "")
            # condense base info into col "A|a|G|g|C|c|T|t|I|i|D|d"
            EB_df[col_name] = EB_df[base_cols].apply(lambda row: "-".join(row), axis=1)
            # add "A|a|G|g|C|c|T|t|I|i|D|d" to columns
            mut_cols.append(col_name)

        # rm unnecessary columns
        EB_df = EB_df[mut_cols]

        # ######### WRITE TO FILE ##############################################
        EB_df.to_csv(output[0], sep='\t', index=False)

        # cleanup
        shell(f"rm -f {EB_matrix_input_file}")
        show_output(
            f"Created EBscore for chrom {chrom} of {tumor_bam} using EBcache and written to {output[0]}",
            color='success'
        )
Example #36
0
from os import system as shell
from select_py import SQLServer

# Query the first ten user records and dump them to a temporary TSV file.
q = "select top 10 userid, birthday from zooskods..odsusers"
r = "biods3"
o = "_sql_server_temp_file_.tsv"

job = SQLServer(q, r, o)
job.execute()

# BUG FIX: the original never closed the file handle; a context manager
# guarantees it is closed even if printing fails. print(...) also keeps
# the script valid under both Python 2 and 3.
with open(o) as data:
    for line in data:
        print(line)

# remove the temp file once its contents have been echoed
shell("rm {0}".format(o))
Example #37
0
def clear():
    """Wipe the terminal by invoking the system `clear` command."""
    # pure-Python alternative: print "\n"*getTerminalSize()[1]
    shell("clear")
Example #38
0
	def tearDown(self):
		"""Remove the test scratch directory created during the test run."""
		# IMPROVEMENT: shutil.rmtree replaces shelling out `rm -rd` -- it is
		# portable, spawns no shell, and with ignore_errors=True stays quiet
		# when the directory was never created.
		import shutil
		shutil.rmtree("_test_dir_", ignore_errors=True)
Example #39
0
def less(*args, **kwargs):
    """Compile a LESS stylesheet with the `lessc` command-line compiler.

    args: exactly two positional values interpolated into "lessc %s %s"
        (source path and output path).
    compress: optional keyword flag; unless explicitly falsy, the
        --yui-compress minification switch is appended (default True).
    """
    # SECURITY NOTE: the command is built by interpolation and executed
    # through a shell -- callers must not pass untrusted paths (injection).
    c = "lessc %s %s" % args
    # IDIOM: kwargs.get('compress', True) replaces the awkward
    # "(not 'compress' in kwargs) or kwargs['compress']" with identical truthiness.
    if kwargs.get('compress', True):
        c += " --yui-compress"
    shell(c)