Code Example #1
    def _mount(self, nobrowse):
        """OS specific mount."""
        # mount_afp 'afp://*****:*****@address/share' <mnt_point>
        args = ["mount", "-t", self.protocol, self.connection["mount_url"],
                self.connection["mount_point"]]
        if is_osx():
            if nobrowse:
                args.insert(1, '-o')
                args.insert(2, 'nobrowse')
        elif is_linux():
            args[0] = "mount_afp"
        else:
            raise JSSError("Unsupported OS.")

        subprocess.check_call(args)
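
All of these examples branch on is_osx() (and usually is_linux()), but the helper itself never appears on this page. For reference, here is a minimal sketch of such a platform check, assuming it is based on the standard library's platform module; the actual implementations in python-jss and viral-ngs may differ.

import platform


def is_osx():
    """Hypothetical sketch: True when running on macOS (Darwin)."""
    return platform.system() == "Darwin"


def is_linux():
    """Hypothetical sketch: True when running on Linux."""
    return platform.system() == "Linux"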
Code Example #2
    def umount(self, forced=True):
        """Try to unmount our mount point.

        Defaults to using forced method.

        """
        # If not mounted, don't bother.
        if os.path.exists(self.connection['mount_point']):
            if is_osx():
                cmd = ['/usr/sbin/diskutil', 'unmount',
                       self.connection['mount_point']]
                if forced:
                    cmd.insert(2, 'force')
                subprocess.check_call(cmd)
            else:
                cmd = ['umount', self.connection['mount_point']]
                if forced:
                    cmd.insert(1, '-f')
                subprocess.check_call(cmd)
Code Example #3
    def _mount(self, nobrowse):
        """OS specific mount."""
        # mount -t cifs -o \
        # username=<user>,password=<password>,domain=<domain>,port=139 \
        # //server/share /mnt/<mountpoint>
        if is_osx():
            args = ["mount", "-t", self.protocol, self.connection["mount_url"],
                    self.connection["mount_point"]]
            if nobrowse:
                args.insert(1, '-o')
                args.insert(2, 'nobrowse')
        elif is_linux():
            args = ["mount", "-t", "cifs","-o",
                    "username=%s,password=%s,domain=%s,port=%s" %
                    (self.connection["username"], self.connection["password"],
                     self.connection["domain"], self.connection["port"]),
                    "//%s/%s" % (self.connection["URL"],
                               self.connection["share_name"]),
                    self.connection["mount_point"]]
        else:
            raise JSSError("Unsupported OS.")

        subprocess.check_call(args)
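
To make the Linux branch above concrete, here is a small standalone sketch that builds the same argv with hypothetical connection values and prints the resulting command line (matching the comment at the top of the method). All values are placeholders.

# Hypothetical connection values, used only to show the argv the Linux
# branch assembles; real credentials and hosts will differ.
connection = {
    "username": "rw_user", "password": "secret", "domain": "PRETENDCO",
    "port": "139", "URL": "repo.pretendco.com", "share_name": "jamf",
    "mount_point": "/mnt/jamf",
}
args = ["mount", "-t", "cifs", "-o",
        "username=%s,password=%s,domain=%s,port=%s" % (
            connection["username"], connection["password"],
            connection["domain"], connection["port"]),
        "//%s/%s" % (connection["URL"], connection["share_name"]),
        connection["mount_point"]]
print(" ".join(args))
# mount -t cifs -o username=rw_user,password=secret,domain=PRETENDCO,port=139 \
#     //repo.pretendco.com/jamf /mnt/jamf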
Code Example #4
    def __init__(self, **connection_args):
        """Set up a SMB connection.
        Required connection arguments:
        URL:            URL to the mountpoint in the format, including
                        volume name Ex: 'my_repository.domain.org/jamf'
                        (Do _not_ include protocol or auth info.)
        mount_point:    Path to a valid mount point.
        share_name:     The fileshare's name.
        domain:         Specify the domain.
        username:       For shares requiring authentication, the
                        username.
        password:       For shares requiring authentication, the
                        password.

        Optional connection arguments (Migrated script support):
        jss:            A JSS Object. NOTE: jss_migrated must be True
                        for this to do anything.

        """
        super(SMBDistributionPoint, self).__init__(**connection_args)
        if is_osx():
            self.fs_type = "smbfs"
        elif is_linux():
            self.fs_type = "cifs"
Code Example #5
File: test_kraken.py Project: yesimon/viral-ngs
@pytest.fixture(scope='session')
def krona_db(request, tmpdir_factory, krona, db_type):
    data_dir = join(util.file.get_test_input_path(), db_type)
    db_dir = os.path.join(data_dir, 'db')

    db = str(tmpdir_factory.mktemp('krona_db_{}'.format(db_type)))
    for d in TAXONOMY_FILES:
        src = join(db_dir, 'taxonomy', d)
        dest = join(db, d)
        os.symlink(src, dest)
    krona.create_db(db)
    return db


@pytest.mark.skipif(tools.is_osx(), reason="kraken osx binary does not yet exist on bioconda")
def test_kraken_tool(tmpdir, kraken, kraken_db, input_bam):
    outdir = tempfile.mkdtemp('-kraken')
    out = join(outdir, 'zaire_ebola.kraken')
    out_filtered = join(outdir, 'zaire_ebola.filtered-kraken')
    out_report = join(outdir, 'zaire_ebola.kraken-report')
    kraken.classify(input_bam, kraken_db, out)
    kraken.filter(out, kraken_db, out_filtered, 0.05)
    kraken.report(out_filtered, kraken_db, out_report)
    assert os.path.getsize(out_report) > 0
    assert os.path.getsize(out_filtered) > 0


@pytest.mark.skipif(tools.is_osx(), reason="kraken osx binary does not yet exist on bioconda")
def test_kraken(kraken_db, input_bam):
    out_report = util.file.mkstempfname('.report')
Code Example #6
File: test_intrahost.py Project: tom-dyar/viral-ngs
class TestPerSample(test.TestCaseWithTmp):
    ''' This tests step 1 of the iSNV calling process
        (intrahost.vphaser_one_sample), which runs V-Phaser2 on
        a single sample, reformats the output slightly, performs
        strand-bias filtering, and adds library-bias statistics.
    '''
    @unittest.skipIf(tools.is_osx(),
                     "vphaser2 osx binary from bioconda has issues")
    def test_vphaser_one_sample_indels(self):
        # Files here were created as follows:
        # ref.indels.fasta is Seed-stock-137_S2_L001_001.fasta
        # in.indels.bam was created from Seed-stock-137_S2_L001_001.mapped.bam
        #     as follows:
        # Created two .sam files using samtools view, restricting to ranges
        # 6811-7011 and 13081-13281, respectively. Paired reads were removed
        # from those files by throwing out the second occurrence of any read name
        # and ANDing the flag fields with 16. Then, a random 90% of reads were
        # removed, except that any reads containing the indel variants at
        # 6911 and 13181 were kept. Then the resulting 2 files were combined.
        myInputDir = util.file.get_test_input_path(self)
        inBam = os.path.join(myInputDir, 'in.indels.bam')
        refFasta = os.path.join(myInputDir, 'ref.indels.fasta')
        outTab = util.file.mkstempfname('.txt')
        intrahost.vphaser_one_sample(inBam,
                                     refFasta,
                                     outTab,
                                     vphaserNumThreads=test._CPUS,
                                     minReadsEach=0)
        expected = os.path.join(myInputDir,
                                'vphaser_one_sample_indels_expected.txt')
        self.assertEqualContents(outTab, expected)

    def test_vphaser_one_sample_one_mate_unpaired(self):
        # Files here were created as follows:
        # ref.indels.fasta is Seed-stock-137_S2_L001_001.fasta
        # in.oneunmapped.bam was created from in.indels.bam, with flags 0->89, 16->73.
        # When removing doubly mapped reads, all reads can end up removed
        # in the case of low-quality runs with few reads.
        # This tests that when the V-Phaser 2 input is empty, a blank
        # file is created as output.
        myInputDir = util.file.get_test_input_path(self)
        inBam = os.path.join(myInputDir, 'in.oneunmapped.bam')
        refFasta = os.path.join(myInputDir, 'ref.indels.fasta')
        outTab = util.file.mkstempfname('.txt')
        intrahost.vphaser_one_sample(inBam,
                                     refFasta,
                                     outTab,
                                     vphaserNumThreads=test._CPUS,
                                     minReadsEach=0,
                                     removeDoublyMappedReads=True)
        assert os.path.getsize(outTab) == 0

    @unittest.skipIf(tools.is_osx(),
                     "vphaser2 osx binary from bioconda has issues")
    def test_vphaser_one_sample_2libs(self):
        # in.2libs.bam was created by "manually" editing in.bam and moving about
        # 1/3 of the reads to ReadGroup2.
        myInputDir = util.file.get_test_input_path(self)
        inBam = os.path.join(myInputDir, 'in.2libs.bam')
        refFasta = os.path.join(myInputDir, 'ref.fasta')
        outTab = util.file.mkstempfname('.txt')
        intrahost.vphaser_one_sample(inBam,
                                     refFasta,
                                     outTab,
                                     vphaserNumThreads=test._CPUS,
                                     minReadsEach=6,
                                     maxBias=3)
        expected = os.path.join(myInputDir,
                                'vphaser_one_sample_2libs_expected.txt')
        self.assertEqualContents(outTab, expected)

    @unittest.skipIf(tools.is_osx(),
                     "vphaser2 osx binary from bioconda has issues")
    def test_vphaser_one_sample_3libs_and_chi2(self):
        # In addition to testing that we can handle 3 libraries, this is testing
        #    the chi2_contingency approximation to fisher_exact. The 4th, 5th,
        #    and 6th rows have large enough minor allele count that their
        #    p-values are calculated using the chi2 approximation. The other
        #    rows are testing the 2 x 3 case of fisher_exact.
        # in.3libs.bam was created by "manually" editing in.2libs.bam and moving
        # about 1/2 of the reads in ReadGroup2 to ReadGroup3.
        myInputDir = util.file.get_test_input_path(self)
        inBam = os.path.join(myInputDir, 'in.3libs.bam')
        refFasta = os.path.join(myInputDir, 'ref.fasta')
        outTab = util.file.mkstempfname('.txt')
        intrahost.vphaser_one_sample(inBam,
                                     refFasta,
                                     outTab,
                                     vphaserNumThreads=test._CPUS,
                                     minReadsEach=6,
                                     maxBias=3)
        expected = os.path.join(myInputDir,
                                'vphaser_one_sample_3libs_expected.txt')
        self.assertEqualContents(outTab, expected)
Code Example #7
# Unit tests for vphaser tool

__author__ = "*****@*****.**"

import os
import pickle
import unittest
import util.file
import tools
from intrahost import vphaser_main
from test import TestCaseWithTmp, _CPUS


@unittest.skipIf(tools.is_osx(),
                 "vphaser2 osx binary from bioconda has issues")
class TestVPhaser2(TestCaseWithTmp):
    def test_vphaser2(self):
        myInputDir = util.file.get_test_input_path(self)
        inBam = os.path.join(myInputDir, 'in.bam')
        outTab = util.file.mkstempfname('.txt')
        vphaser_main(inBam, outTab, numThreads=_CPUS)
        with open(outTab, 'rt') as outf:
            recs = map(lambda s: s.strip('\n').split('\t'), outf.readlines())
        with open(os.path.join(myInputDir, 'expected.cpickle'), 'rb') as expf:
            expectedRecs = pickle.load(expf)
        # Vphaser2 p-val calculation is unstable and sometimes varies from
        # run to run, so exclude it from comparison.
        self.assertEqual([rec[:4] + rec[5:] for rec in recs],
                         [rec[:4] + rec[5:] for rec in expectedRecs])
        """
        Creation of in.bam:
Code Example #8
import itertools
import logging
import os
import os.path
import shlex
import shutil
import subprocess
import tools
import util.file

TOOL_VERSION = '0.9.10'

log = logging.getLogger(__name__)


@tools.skip_install_test(condition=tools.is_osx())
class Diamond(tools.Tool):

    SUBCOMMANDS = ['makedb', 'blastx', 'blastp', 'view']

    def __init__(self, install_methods=None):
        if not install_methods:
            install_methods = [
                tools.PrexistingUnixCommand(shutil.which('diamond'))
            ]
        super(Diamond, self).__init__(install_methods=install_methods)

    def version(self):
        return TOOL_VERSION

    def build(self, db, protein_fastas, options=None, option_string=None):
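
The viral-ngs implementation of tools.skip_install_test is not shown on this page. Purely as a sketch of the general idea, and not the project's actual code, a parameterized class decorator along these lines could tag a Tool subclass so an install-test harness skips it when the condition (here tools.is_osx()) is truthy:

# Hypothetical sketch, not the viral-ngs implementation: tag the class so a
# test harness can decide to skip its install test.
def skip_install_test(condition=None):
    def decorator(cls):
        # Skip unconditionally when no condition is given; otherwise only
        # when the condition evaluated truthy at decoration time.
        cls._skip_install_test = True if condition is None else bool(condition)
        return cls
    return decorator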
Code Example #9
File: diamond.py Project: yesimon/viral-ngs
import logging
import os
import os.path
import shlex
import shutil
import subprocess
import tools
import util.file

TOOL_VERSION = '0.8.22'
CONDA_VERSION = tools.CondaPackageVersion('0.8.22', '2')

log = logging.getLogger(__name__)


@tools.skip_install_test(condition=tools.is_osx())
class Diamond(tools.Tool):

    SUBCOMMANDS = ['makedb', 'blastx', 'blastp', 'view']

    def __init__(self, install_methods=None):
        if not install_methods:
            install_methods = [
                tools.CondaPackage("diamond", version=CONDA_VERSION)
            ]
        super(Diamond, self).__init__(install_methods=install_methods)

    def version(self):
        return TOOL_VERSION

    def build(self, db, protein_fastas, options=None, option_string=None):
Code Example #10
#!/usr/bin/env python

import os
import sys

import pytest

import tools
from test.pipelines.snakemake import snake
from test.integration.test_diamond import *  # for pytest fixtures


@pytest.mark.skipif(tools.is_osx(), reason="not currently tested under OSX")
@pytest.mark.skipif(sys.version_info < (3, 5),
                    reason="Python version is too old for snakemake.")
def test_pipes(tmpdir_function, diamond_db, taxonomy_db, krona_db, input_bam):
    runner = snake.SnakemakeRunner(workdir=tmpdir_function)
    override_config = {
        'diamond_db': diamond_db,
        'taxonomy_db': taxonomy_db,
        'krona_db': krona_db,
    }
    runner.set_override_config(override_config)
    runner.setup()
    runner.link_samples([input_bam],
                        destination='per_sample',
                        link_transform=snake.rename_raw_bam)
    runner.create_sample_files(sample_files=['samples_metagenomics'])

    krona_out = join(
        runner.config['data_dir'], runner.config['subdirs']['metagenomics'],
Code Example #11
    def __init__(self, jss):
        """Populate our distribution point dict from our configuration
        file.

        The JSS server's DistributionPoints is used to automatically
        configure AFP and SMB shares. To make use of this, the repo's
        dictionary should contain only the name of the repo, as found in
        the web interface, and the password for the RW user. This method
        is deprecated, and you should fully specify the required
        connection arguments for each DP in the future.

        Please see the docstrings for the different DistributionPoint
        subclasses for information regarding required configuration
        information and properties.

        jss:      JSS server object

        """
        self.jss = jss
        self._children = []

        # If no distribution points are configured, there's nothing to
        # do here.
        if self.jss.repo_prefs:
            self.dp_info = self.jss.DistributionPoint().retrieve_all()
            # Set up a counter for avoiding name clashes with optional
            # name variable.
            counter = 0
            for repo in self.jss.repo_prefs:
                # Handle AFP/SMB shares, as they can be auto-configured.
                # Legacy system did not require explicit type key.
                if not repo.get('type'):
                    # Must be AFP or SMB.
                    # Use JSS.DistributionPoints information to
                    # automatically configure this DP.
                    for dp_object in self.dp_info:
                        if repo['name'] == dp_object.findtext('name'):
                            name = dp_object.findtext('name')
                            URL = dp_object.findtext('ip_address')
                            connection_type = \
                                dp_object.findtext('connection_type')
                            share_name = dp_object.findtext('share_name')
                            domain = dp_object.findtext('workgroup_or_domain')
                            port = dp_object.findtext('share_port')
                            username = \
                                dp_object.findtext('read_write_username')
                            password = repo.get('password')

                            if is_osx():
                                mount_point = os.path.join('/Volumes',
                                                           share_name)
                            elif is_linux():
                                mount_point = os.path.join('/mnt', share_name)
                            else:
                                raise JSSError("Unsupported OS.")

                            if connection_type == 'AFP':
                                dp = AFPDistributionPoint(URL=URL, port=port,
                                    share_name=share_name,
                                    mount_point=mount_point,
                                    username=username, password=password,
                                    jss=self.jss)
                            elif connection_type == 'SMB':
                                dp = SMBDistributionPoint(URL=URL, port=port,
                                    share_name=share_name,
                                    mount_point=mount_point,
                                    domain=domain, username=username,
                                    password=password,
                                    jss=self.jss)

                            # No need to keep looping.
                            break

                # Handle explicitly declared DPs.
                elif repo.get('type') in ['AFP', 'SMB']:
                    URL = repo['URL']
                    # If present, strip the scheme off the URL;
                    # it is reconstructed later.
                    if "://" in URL:
                        URL = URL.split('://')[1]

                    connection_type = repo['type']
                    share_name = repo['share_name']
                    # Domain is not used for AFP.
                    domain = repo.get('domain')
                    username = repo['username']
                    password = repo['password']

                    if is_osx():
                        mount_point = os.path.join('/Volumes', share_name)
                    elif is_linux():
                        mount_point = os.path.join('/mnt', share_name)
                    else:
                        raise JSSError("Unsupported OS.")

                    if connection_type == 'AFP':
                        # If port isn't given, assume it's the std of
                        # 548.
                        port = repo.get('port') or '548'
                        dp = AFPDistributionPoint(URL=URL, port=port,
                                                share_name=share_name,
                                                mount_point=mount_point,
                                                username=username,
                                                password=password,
                                                jss=self.jss)
                    elif connection_type == 'SMB':
                        # If port isn't given, assume it's the std of
                        # 139.
                        port = repo.get('port') or '139'
                        dp = SMBDistributionPoint(URL=URL, port=port,
                                                share_name=share_name,
                                                mount_point=mount_point,
                                                domain=domain,
                                                username=username,
                                                password=password,
                                                jss=self.jss)

                elif repo.get('type') == 'JDS':
                    dp = JDS(jss=self.jss)
                else:
                    raise ValueError('Distribution Point Type not recognized.')

                # Add the DP to the list.
                self._children.append(dp)
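
The loop above accepts two styles of repo configuration: a legacy entry carrying only the repo name and RW password (auto-configured from the JSS server's DistributionPoints data), and a fully specified entry with an explicit 'type'. A hypothetical repo_prefs list covering both styles, with placeholder values, might look like this:

# Hypothetical repo_prefs entries; all values are placeholders.
repo_prefs = [
    # Legacy style: no 'type' key; the rest is looked up on the JSS server.
    {"name": "Main Repo", "password": "secret"},

    # Explicit style: every connection argument is spelled out.
    {"type": "SMB",
     "URL": "repo.pretendco.com",
     "share_name": "jamf",
     "domain": "PRETENDCO",
     "username": "rw_user",
     "password": "secret",
     "port": "139"},
]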
Code Example #12
    def is_mounted(self):
        """ Test for whether a mount point is mounted.

        If it is currently mounted, determine the path where it's
        mounted and update the connection's mount_point accordingly.
        """
        mount_check = subprocess.check_output('mount').splitlines()
        # The mount command returns lines like this on OS X...
        # //[email protected]/JSS%20REPO on /Volumes/JSS REPO
        # (afpfs, nodev, nosuid, mounted by local_me)
        # and like this on Linux...
        # //pretendco.com/jamf on /mnt/jamf type cifs (rw,relatime,
        # <options>...)

        valid_mount_strings = self._get_valid_mount_strings()
        was_mounted = False

        for mount in mount_check:
            if is_osx():
                fs_match = re.search(r'\(([\w]*),*.*\)$', mount)
            if is_linux():
                fs_match = re.search(r'type ([\w]*) \(.*\)$', mount)
            if fs_match:
                fs_type = fs_match.group(1)
            else:
                fs_type = None
            # Automounts, non-network shares, and network shares
            # all have a slightly different format, so it's easiest to
            # just split.
            mount_string = mount.split(' on ')[0]
            # Does the mount_string match one of our valid_mount_strings?
            if [mstring for mstring in valid_mount_strings if
                mstring in mount_string] and self.fs_type == fs_type:
                # Get the mount point string, working from the end back
                # to the last "on" but before the options (wrapped in
                # parentheses). Considers alphanumerics, /, _, - and a
                # blank space as valid, but no other characters.
                if is_osx():
                    mount_point_match = re.search(r'on ([\w/ -]*) \(.*$',
                                                  mount)
                elif is_linux():
                    mount_point_match = re.search(
                        r'on ([\w/ -]*) type .*$', mount)
                if mount_point_match:
                    mount_point = mount_point_match.group(1)
                else:
                    mount_point = None
                was_mounted = True
                # Reset the connection's mount point to the discovered
                # value.
                if mount_point:
                    self.connection['mount_point'] = mount_point
                    if self.connection['jss'].verbose:
                        print("%s is already mounted at %s.\n" % \
                              (self.connection['URL'], mount_point))

                # We found the share, no need to continue.
                break

        if not was_mounted:
            # If the share is not mounted, check for another share
            # mounted to the same path and, if found, increment the
            # name to avoid conflicts.
            count = 1
            while os.path.ismount(self.connection['mount_point']):
                self.connection['mount_point'] = "%s-%s" % \
                    (self.connection['mount_point'], count)
                count += 1

        # Do an inexpensive double check...
        return os.path.ismount(self.connection['mount_point'])
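
To illustrate the parsing above, this small standalone demo applies the same two OS X regular expressions to a made-up mount(8) line in the format the comment describes (server, user, and share names are hypothetical):

import re

# Hypothetical OS X `mount` output line.
line = ("//rw_user@pretendco.com/JSS%20REPO on /Volumes/JSS REPO "
        "(afpfs, nodev, nosuid, mounted by local_me)")

fs_match = re.search(r'\(([\w]*),*.*\)$', line)
print(fs_match.group(1))            # afpfs

mount_point_match = re.search(r'on ([\w/ -]*) \(.*$', line)
print(mount_point_match.group(1))   # /Volumes/JSS REPO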
Code Example #13
File: test_kraken.py Project: flyfl1992/viral-ngs
@pytest.fixture(scope='session')
def krona_db(request, tmpdir_factory, krona, db_type):
    data_dir = join(util.file.get_test_input_path(), db_type)
    db_dir = os.path.join(data_dir, 'db')

    db = str(tmpdir_factory.mktemp('krona_db_{}'.format(db_type)))
    for d in TAXONOMY_FILES:
        src = join(db_dir, 'taxonomy', d)
        dest = join(db, d)
        os.symlink(src, dest)
    krona.create_db(db)
    return db


@pytest.mark.skipif(tools.is_osx(),
                    reason="kraken osx binary does not yet exist on bioconda")
def test_kraken_tool(tmpdir, kraken, kraken_db, input_bam):
    outdir = tempfile.mkdtemp('-kraken')
    out = join(outdir, 'zaire_ebola.kraken')
    out_filtered = join(outdir, 'zaire_ebola.filtered-kraken')
    out_report = join(outdir, 'zaire_ebola.kraken-report')
    kraken.classify(input_bam, kraken_db, out)
    kraken.filter(out, kraken_db, out_filtered, 0.05)
    kraken.report(out_filtered, kraken_db, out_report)
    assert os.path.getsize(out_report) > 0
    assert os.path.getsize(out_filtered) > 0


@pytest.mark.skipif(tools.is_osx(),
                    reason="kraken osx binary does not yet exist on bioconda")
Code Example #14
File: test_kraken.py Project: tianyabeef/viral-ngs
@pytest.fixture(scope='session')
def krona_db(request, tmpdir_factory, krona, db_type):
    data_dir = join(util.file.get_test_input_path(), db_type)
    db_dir = os.path.join(data_dir, 'db')

    db = str(tmpdir_factory.mktemp('krona_db_{}'.format(db_type)))
    for d in TAXONOMY_FILES:
        src = join(db_dir, 'taxonomy', d)
        dest = join(db, d)
        os.symlink(src, dest)
    krona.create_db(db)
    return db


@pytest.mark.skipif(tools.is_osx(),
                    reason="kraken osx binary does not yet exist on bioconda")
def test_kraken_tool(tmpdir, kraken, kraken_db, input_bam):
    outdir = tempfile.mkdtemp('-kraken')
    out = join(outdir, 'zaire_ebola.kraken')
    out_filtered = join(outdir, 'zaire_ebola.filtered-kraken')
    out_report = join(outdir, 'zaire_ebola.kraken-report')
    kraken.classify(input_bam, kraken_db, out)
    kraken.filter(out, kraken_db, out_filtered, 0.05)
    kraken.report(out_filtered, kraken_db, out_report)
    assert os.path.getsize(out_report) > 0
    assert os.path.getsize(out_filtered) > 0


@pytest.mark.skipif(tools.is_osx(),
                    reason="kraken osx binary does not yet exist on bioconda")
Code Example #15
# Unit tests for vphaser tool

__author__ = "*****@*****.**"

import os
import pickle
import unittest
import util.file
import tools
from intrahost import vphaser_main
from test import TestCaseWithTmp


@unittest.skipIf(tools.is_osx(), "vphaser2 osx binary from bioconda has issues")
class TestVPhaser2(TestCaseWithTmp):

    def test_vphaser2(self):
        myInputDir = util.file.get_test_input_path(self)
        inBam = os.path.join(myInputDir, 'in.bam')
        outTab = util.file.mkstempfname('.txt')
        vphaser_main(inBam, outTab, numThreads=8)
        with open(outTab, 'rt') as outf:
            recs = map(lambda s: s.strip('\n').split('\t'), outf.readlines())
        with open(os.path.join(myInputDir, 'expected.cpickle'), 'rb') as expf:
            expectedRecs = pickle.load(expf)
        # Vphaser2 p-val calculation is unstable and sometimes varies from
        # run to run, so exclude it from comparison.
        self.assertEqual([rec[:4] + rec[5:] for rec in recs], [rec[:4] + rec[5:] for rec in expectedRecs])
        """
        Creation of in.bam:
        Start with test file that ships with V-Phaser 2.