示例#1
0
    def call(self, prog, args):
        """Run *prog* with the whitespace-separated *args* string.

        The child process inherits the redirection targets stored on
        ``self.stdout`` / ``self.stderr``.

        :param prog: executable to run (first element of the argv list).
        :param args: single string of arguments, split on whitespace.
        """
        # str.split() with no separator already discards leading/trailing
        # whitespace, so the original lstrip().rstrip() was redundant and
        # the element-by-element append loop collapses to one expression.
        sp_call([prog] + args.split(), stdout=self.stdout, stderr=self.stderr)
示例#2
0
    def player_win(self):
        """
        Check if the player has won by reaching 2048 in any tile.

        :return: whether the player has won
        :rtype: bool
        """
        won = any(tile == 2048 for tile in self.pos.values())
        if won:
            # Clear the terminal and show the final board state.
            sp_call('clear' if os_nm == 'posix' else 'cls', shell=False)
            print(self.represent())
        return won
示例#3
0
def run_QoRTs_count(in_bam, in_gtf, out_dir, test_run=False):
    """
    Run QoRTS on aligned .bam files. Functions:
        - Perform QC checks
        - Generate count files

    Require a separate directory for each .bam file acession number.

    :param in_bam
        Input .bam file, one per sample.
    :param in_gtf
        Initial annotation file.
    :param out_dir
        Output directory.
    :param test_run
        When True, pass --testRun to QoRTs (quick partial run).

    :result
        Write count.txt file in the output directory.
    """
    args = ["java", "-jar", enCount.config.QORTS_JAR,
            "QC", "--stranded"]
    if test_run:
        args.append("--testRun")
    args += [in_bam, in_gtf, out_dir]

    print(" ".join(args))
    # Context managers close the log files even if sp_call raises;
    # the original opened them and never closed them (handle leak).
    with OPEN(os.path.join(out_dir, "run_QoRTs_count.out.txt"), "w") as stdout, \
         OPEN(os.path.join(out_dir, "run_QoRTs_count.err.txt"), "w") as stderr:
        return sp_call(args, stdout=stdout, stderr=stderr)
示例#4
0
def run_QoRTs_novel_splices(in_dir, in_gtf, in_size_factors, out_dir,
                            min_count=6):
    """
    Identify novel splice junctions based on minimal read coverage.

    :param in_dir
        Directory with merged count files generated by run_QoRTs_merge_counts.
    :param in_size_factors
        Size factors for all samples. To be used for merging.
    :param in_gtf
        Initial annotation file.
    :param out_dir
        Output directory.
    :param min_count
        Filtering parameter for determining new junctions, suggested=6.

    :result
        Produce .gff file with novel splice junctions and
        updated count files in the output directory.
    """
    # QoRTs expects a trailing slash on the input directory.
    in_dir = "%s/" % in_dir

    args = ["java", "-jar", enCount.config.QORTS_JAR, "mergeNovelSplices",
            "--minCount", str(min_count), "--stranded",
            in_dir, in_size_factors, in_gtf, out_dir]

    print(" ".join(args))
    # Context managers close the log files even if sp_call raises;
    # the original opened them and never closed them (handle leak).
    with OPEN(os.path.join(out_dir, "run_QoRTs_novel_splices.out.txt"), "w") as stdout, \
         OPEN(os.path.join(out_dir, "run_QoRTs_novel_splices.err.txt"), "w") as stderr:
        return sp_call(args, stdout=stdout, stderr=stderr)
示例#5
0
def wake_on_lan(request):
    """Run an external wake on lan script.

    Logs the requester's address as a Message, invokes the WOL command,
    and returns a JSON response with no error.
    """
    # datetime.utcnow() is deprecated (Python 3.12); build an aware UTC
    # timestamp directly — same ISO string as the original produced.
    now = (
        datetime.now(timezone.utc)
        .replace(microsecond=0)
        .isoformat()
    )
    Message(
        sender="PyBrowserDash",
        text=f"WOL request from {request.META['REMOTE_ADDR']} at {now}",
        type="log_only",
        time=now,
        data="",
    ).save()
    sp_call(("python", WOL_COMMAND))
    return JsonResponse({"error": None})
示例#6
0
def run_JunctionSeq_analysis(in_decoder, in_gff, in_count_dir, out_dir):
    """
    Run the whole JunctionSeq analysis of differential exon *and* splice
    junction usage.
    :param in_decoder
        Decoder file indicating experimental design.
    :param in_gff
        .gff file (including novel junctions) as produced by QoRTs.
    :param in_count_dir
        Directory with count files with naming as in decoder.
    :param out_dir
        Output directory.
    :result
        Produce .tab files with differential usage analysis results in the
        output directory.
    """
    stddir = os.path.dirname(in_decoder)
    # The R script expects trailing slashes on directory arguments.
    in_count_dir = "%s/" % in_count_dir
    out_dir = "%s/" % out_dir

    args = [enCount.config.RSCRIPT, enCount.config.JUNCTIONSEQ_R, in_decoder,
            in_gff, in_count_dir, out_dir]

    print(" ".join(args))
    # Context managers close the log files even if sp_call raises;
    # the original opened them and never closed them (handle leak).
    with OPEN(os.path.join(stddir, "run_JunctionSeq_analysis.out.txt"), "w") as stdout, \
         OPEN(os.path.join(stddir, "run_JunctionSeq_analysis.err.txt"), "w") as stderr:
        return sp_call(args, stdout=stdout, stderr=stderr)
示例#7
0
def fShell(szCmd, bRet=False):
    """Execute *szCmd* through the shell.

    :param szCmd: command line to run.
    :param bRet: when True, capture and return the command's output with
        the trailing newline stripped; otherwise return its exit status.
    :return: output bytes or exit code; -1 if output capture fails with a
        non-zero exit status.
    """
    try:
        if not bRet:
            return sp_call(szCmd, shell=True)
        return sp_co(szCmd, shell=True)[:-1]  # drop the final \n
    except sp_CalledProcessError:
        return -1
示例#8
0
def main():
    """
    Main loop of the game.

    Plays turns until the board reports a win or game over, then records
    the player's name, score, moves and date in the highscore table.
    """
    create_table()
    board = Board()
    while board.game_over is False and board.win is False:
        sp_call('clear' if os_nm == 'posix' else 'cls', shell=False)
        rprint(board.represent())
        board.turn()
    if board.game_over is True:
        print("Game over!")
    elif board.win is True:
        print("You win!")
    pl_name = input("Input your name on the leaderboard: ")
    today = datetime.today().strftime('%Y-%m-%d')
    connection = sqlite3.connect(hs_path)
    try:
        cursor = connection.cursor()
        # BUG FIX: the original referenced self.score / self.moves inside a
        # module-level function (NameError at runtime); the stats live on
        # the Board instance — assumes Board exposes score/moves (TODO confirm).
        query_tuple = (pl_name, board.score, board.moves, today)
        cursor.execute("INSERT INTO highscore (player, score, moves, date) "
                       "VALUES (?, ?, ?, ?);", query_tuple)
        connection.commit()
    finally:
        # The original leaked the sqlite connection.
        connection.close()
示例#9
0
def gtf_to_gff(in_gtf, out_gff):
    """
    Prepare a gff (non-overlapping bins) based on a gtf.
    Calls provided (python2 only) dexseq_prepare_annotation.py script.

    :param in_gtf:
        Input .gtf file.
    :param out_gff:
        Output .gff file.
    :return:
        External process call status.
    """
    cmd = [PYTHON2_EXEC, DEXSEQ_PREP_ANNOTATION, in_gtf, out_gff]
    # Echo the exact command line before executing it.
    print(" ".join(cmd))
    return sp_call(cmd)
示例#10
0
def run_star_generate_genome(in_gtf, in_genome_fasta_dir, out_genome_dir,
                             read_length=100):
    """
    Generate a genome index with STAR.

    :param in_gtf
        Annotation .gtf file, or None for genomes with no junctions.
    :param in_genome_fasta_dir
        Directory with genome fasta files.
    :param read_length
        Read length. Suggested parameter by STAR documentation is 100.
    :param out_genome_dir
        Directory for generating genome indices.

    :results
        Generate genome index files in out_genome_dir.
    """
    # Ensure the path ends with a "/". BUG FIX: the original condition was
    # inverted — it appended the slash only when one was already present
    # (compare the correct check in run_star).
    if not out_genome_dir.endswith("/"):
        out_genome_dir += "/"

    tmp_dir = os.path.join(out_genome_dir, "STARtmp")

    # Scale index parameters to genome length / reference count
    # (small-genome guidance from the STAR manual).
    ln, refs = _genome_parameters(in_genome_fasta_dir)
    genomeSAindexNbases = int(min(14, 0.5 * log(ln) / log(2) - 1))
    genomeChrBinNbits = int(min(18, log(ln / refs) / log(2)))

    args = [STAR_EXEC, "--runThreadN", str(NUM_THREADS), "--runMode",
            "genomeGenerate", "--genomeDir", out_genome_dir,
            "--outFileNamePrefix", tmp_dir,
            "--genomeSAindexNbases", str(genomeSAindexNbases),
            "--genomeChrBinNbits", str(genomeChrBinNbits)]

    # Genomes with no junctions do not require GTFs
    if in_gtf is not None:
        args.extend(["--sjdbGTFfile", in_gtf,
                     "--sjdbOverhang", str(read_length - 1)])

    args.append("--genomeFastaFiles")
    args.extend(glob.glob(os.path.join(in_genome_fasta_dir, "*.fa")))

    print(" ".join(args))
    return sp_call(args)
示例#11
0
def run_QoRTs_size_factors(in_dir, in_decoder, out_file):
    """
    Compute size factors for all samples via the QoRTs R script.

    :param  in_dir
        Directory with raw count files.
    :param in_decoder
        Metadata file indicating sample IDs
    :param out_file
        Size factor file to be generated
    :return
        External process call status.
    """
    stddir = os.path.dirname(out_file)
    # The R script expects a trailing slash on the input directory.
    in_dir = "%s/" % in_dir

    args = [enCount.config.RSCRIPT, enCount.config.QORTS_R,
            in_dir, in_decoder, out_file]
    print(" ".join(args))
    # Context managers close the log files even if sp_call raises;
    # the original opened them and never closed them (handle leak).
    with OPEN(os.path.join(stddir, "run_QoRTs_size_factors.out.txt"), "w") as stdout, \
         OPEN(os.path.join(stddir, "run_QoRTs_size_factors.err.txt"), "w") as stderr:
        return sp_call(args, stdout=stdout, stderr=stderr)
示例#12
0
def run_QoRTs_merge_counts(in_dir, in_decoder, out_dir, test_run=False):
    """
    Merge technical replicates from same biological sample.

    :param in_dir
        Directory with raw count files generated by run_QoRTs_count.
    :param in_decoder
        Decoder file experiment name/technical replicate.
    :param out_dir
        Output directory.
    :param test_run
        Currently unused; kept for interface compatibility.

    :result
        Merge count files given by the decoder and place in the output
        directory.
    """
    std_dir = os.path.dirname(in_decoder)

    args = ["java", "-jar", enCount.config.QORTS_JAR,
            "mergeAllCounts", in_dir, in_decoder, out_dir]
    print(" ".join(args))
    # Context managers close the log files even if sp_call raises;
    # the original opened them and never closed them (handle leak).
    with OPEN(os.path.join(std_dir, "run_QoRTs_merge_counts.out.txt"), "w") as stdout, \
         OPEN(os.path.join(std_dir, "run_QoRTs_merge_counts.err.txt"), "w") as stderr:
        return sp_call(args, stdout=stdout, stderr=stderr)
示例#13
0
File: wksp.py  Project: pumpikano/wksp
	osa_cmds = "\n".join(osa_cmds)

	# print osa_cmds

	p = Popen(['osascript', '-'], stdin=PIPE)
	p.communicate(input=osa_cmds)

if __name__ == '__main__':

    # Open the named project: run its headed commands in terminal tabs and
    # its headless commands directly.
    proj_name = sys.argv[1]
    if not proj_name:
        # BUG FIX: py2 print statements replaced with py3-compatible calls;
        # message typo "project a project" corrected.
        print('Please provide a project name to open')
        exit(1)

    proj_dir = sys.argv[2]

    filepath = os.path.join(proj_dir, proj_name + '.json')
    if not os.path.isfile(filepath):
        print('File %s not present' % (filepath))
        exit(1)

    # with-block closes the file; the original leaked the handle.
    with open(filepath, 'r') as f:
        proj_data = json.load(f)

    open_terminal_tabs(proj_data['headed'])

    for cmd in proj_data['headless']['commands']:
        sp_call(cmd)


示例#14
0
def run_star(in_fastq_pair, in_genome_dir, out_dir, clip3pAdapterSeq="-"):
    """
        Run STAR aligner on the in_fastq file.

        Produces out_dir/Aligned.out.sam .

        :param in_fastq_pair
            Input .fastq file pair.
        :param in_genome_dir
            Directory with generated genome indices.
        :param out_dir
            Prefix for the output directory.

        :param clip3pAdapterSeq
            string(s): adapter sequences to clip from 3p of each mate.
            If one value is given, it will be assumed the same for both mates.
            Default: -

        :result
            Generate a .bam file sorted by coordinate in out_dir.
            Assume 3' adaptor clipping.

    """
    assert len(in_fastq_pair) == 2
    for mate in in_fastq_pair:
        assert mate.endswith(".fastq.gz") or mate.endswith(".fastq")
    if not out_dir.endswith("/"):
        out_dir += "/"

    mate1, mate2 = in_fastq_pair

    # Basic options
    cmd = [STAR_EXEC,
           "--readFilesIn",       mate1, mate2,
           "--genomeDir",         in_genome_dir,
           "--runThreadN",        str(NUM_THREADS),
           "--outFileNamePrefix", out_dir,
           "--clip3pAdapterSeq",  clip3pAdapterSeq,
           "--outSAMtype", "BAM", "SortedByCoordinate"]

    # Standard ENCODE options (Manual 2.5.1, p. 7)
    cmd.extend([
        "--outFilterType", "BySJout",
        "--outFilterMultimapNmax",  "20",
        "--alignSJoverhangMin",  "8",
        "--alignSJDBoverhangMin", "1",
        "--outFilterMismatchNmax", "999",
        "--alignIntronMin", "20",
        "--alignIntronMax", "1000000",
        "--alignMatesGapMax", "1000000",
    ])

    # Define local RAM limit
    if RAM_LIMIT is not None:
        cmd.extend(["--limitBAMsortRAM", str(RAM_LIMIT)])

    # Gzipped input needs a decompress command
    if mate1.endswith(".gz"):
        cmd.extend(["--readFilesCommand", "zcat"])

    print(" ".join(cmd))
    return sp_call(cmd)
示例#15
0
from sys import argv

# Map the CLI band argument to a carrier frequency in Hz (default: 2.45 GHz).
if argv[1] == '430':
    d_freq = 430000000
elif argv[1] == '915':
    d_freq = 915000000
else:
    d_freq = 2450000000

d_dist = argv[2]
d_rxdb = argv[3]
d_txdb = argv[4]
d_dir = './bin_files/' + str(
    argv[1]) + '_' + str(d_rxdb) + 'rx_' + str(d_txdb) + 'tx_' + str(d_dist)

# Create the output directory with the stdlib instead of shelling out to
# `mkdir` (portable, no child process). exist_ok mirrors the original's
# tolerance of a pre-existing directory — mkdir only printed an error and
# the script carried on.
import os
os.makedirs(d_dir, exist_ok=True)

d_rxid = argv[5]
d_txid = argv[6]

# NOTE(review): distutils was removed in Python 3.12 — consider
# packaging.version.Version as a replacement.
from distutils.version import StrictVersion

if __name__ == '__main__':
    import ctypes
    import sys
    # On Linux, initialize X11 threading support before any GUI work.
    if sys.platform.startswith('linux'):
        try:
            x11 = ctypes.cdll.LoadLibrary('libX11.so')
            x11.XInitThreads()
        # BUG FIX: py2 print statement replaced with a py3-compatible call;
        # bare except narrowed (load/call failures surface as Exception).
        except Exception:
            print("Warning: failed to XInitThreads()")
示例#16
0
 def check_call(*args, **kwargs):
     """Forward everything to sp_call and raise OSError on a nonzero exit."""
     status = sp_call(*args, **kwargs)
     if status != 0:
         raise OSError("Command execution failed!")
示例#17
0
def call(*args, **kw):
    """Print the command about to be executed, then delegate to sp_call."""
    banner = '(with kwargs = {})'.format(kw)
    print('Running: ', args[0], banner)
    return sp_call(*args, **kw)
示例#18
0
    def check_revocation(self, netloc, port):
        """
        Check whether any certificate in the chain is revoked.

        For each CRL distribution point on the certificate: download the CRL
        (cached on disk in DER form), convert it to PEM with the ``openssl``
        CLI, append the server's chain certificates fetched via
        ``openssl s_client``, then run ``openssl verify -crl_check`` against
        the stored PEM certificate. Revocations are recorded via
        ``self.add_error``; temporary files are removed afterwards.

        :param netloc: target netloc (host passed to s_client -connect)
        :param port: target port
        :return: None; returns early (None) if the temp cert file cannot be
                 created, and silently skips CRLs that cannot be fetched or
                 written.
        """
        if self.crlDistributionPoints is not None and self.pem_cert is not None:
            # pid makes the temp file names unique per process.
            pid = os_getpid()
            temp_cert_path = os_path_join(SQLITE_TEMP_DIR,
                                          'temp_cert_{}.pem'.format(pid))
            try:
                temp_cert_file = open(
                    temp_cert_path, 'w')  # file containing cert in PEM format
            except OSError:
                return None
            else:
                temp_cert_file.write(self.pem_cert)
                temp_cert_file.close()

            for crl_url in self.crlDistributionPoints:  # for each crl given
                # Derive a filesystem-safe cache name from the CRL URL.
                _split = urlsplit(crl_url)
                der_path = os_path_join(
                    SQLITE_TEMP_DIR,
                    '{}{}'.format(_split.netloc,
                                  _split.path.replace('/', self._url_sep)))
                pem_path = '{}_{}.pem'.format(der_path, pid)

                if not os_path_exists(
                        der_path
                ):  # create crl in DER format if does not exist
                    opener = build_opener(HTTPSHandler)
                    install_opener(opener)
                    try:
                        req = urlopen(url=crl_url)
                    # NOTE(review): bare except swallows every fetch error,
                    # including KeyboardInterrupt — consider narrowing.
                    except:
                        continue
                    # Cap the CRL download at ~10 MB.
                    data = req.read(10000000)
                    if data is None:
                        continue
                    try:
                        der_file = open(der_path, 'wb')
                    except OSError:
                        continue
                    else:
                        der_file.write(data)
                        der_file.close()

                try:  # PEM file which will contain crl + chain certs
                    pem_file = open(pem_path, 'w')
                except OSError:
                    continue
                else:
                    # Convert the cached DER CRL to PEM, appending into pem_file.
                    cmd = [
                        self.openssl_cmd, 'crl', '-inform', 'DER', '-in',
                        der_path, '-outform', 'PEM'
                    ]
                    # print(' '.join(cmd))      # debug
                    sp_call(cmd, stdout=pem_file, stderr=SP_DEVNULL)

                    # echo | openssl s_client: the piped stdin makes s_client
                    # exit immediately after printing the chain certificates.
                    with sp_Popen([self.echo_cmd],
                                  stdout=SP_PIPE,
                                  stderr=SP_DEVNULL) as proc_echo:
                        cmd = [
                            self.openssl_cmd, 's_client', '-connect',
                            '{}:{}'.format(netloc, port), '-showcerts'
                        ]
                        with sp_Popen(cmd,
                                      stdin=proc_echo.stdout,
                                      stdout=SP_PIPE,
                                      stderr=SP_DEVNULL) as proc_certs:
                            data = proc_certs.stdout.read()
                            ddata = data.decode('utf-8')
                            # NOTE(review): decode() never returns None, so
                            # this guard is always true.
                            if ddata is not None:
                                # Copy every PEM certificate block from the
                                # s_client output into the CRL PEM file.
                                reg = re_compile(
                                    '-+BEGIN CERTIFICATE-+[^-]+-+END CERTIFICATE-+\n'
                                )
                                for res in reg.finditer(ddata):
                                    pem_file.write(res.group(0))
                    pem_file.close()

                    # finally verify if cert is revoked
                    cmd = [
                        self.openssl_cmd, 'verify', '-crl_check', '-CAfile',
                        pem_path, temp_cert_path
                    ]
                    # print(' '.join(cmd))      # debug
                    with sp_Popen(cmd, stdout=SP_PIPE,
                                  stderr=SP_DEVNULL) as proc_verify:
                        data = proc_verify.stdout.read()
                        ddata = data.decode('utf-8')
                        if ddata is not None:
                            # openssl reports failures as "lookup:<reason>";
                            # record any reason mentioning revocation.
                            reg = re_compile('lookup:([^\n]+)')
                            for res in reg.finditer(ddata):
                                if 'revoked' in res.group(1):
                                    self.add_error(NameError(res.group(1)))
                    # Per-URL PEM scratch file is removed after each check.
                    os_remove(pem_path)
            os_remove(temp_cert_path)