def patchLibs(libdir):
    """Make sure that libraries can be dynamically loaded. This is a brute force approach"""

    saveDir = os.getcwd()

    # fix libVitaCFilters.dylib
    dir = os.path.join(
        libdir,
        "python%s/site-packages/vitamind/analyticsLib/pipelineElements" %
        pythonVersion)
    os.chdir(dir)
    # Fix up library references in libVitaCFilters.dylib
    lib = "libVitaCFilters.dylib"
    utils.runCommand([
        "install_name_tool", "-change",
        "/tmp/external.buildaccount/lib/libcv.1.dylib", "./libcv.1.dylib", lib
    ])
    utils.runCommand([
        "install_name_tool", "-change",
        "/tmp/external.buildaccount/lib/libcxcore.1.dylib",
        "./libcxcore.1.dylib", lib
    ])

    for lib in ["libcv.1.dylib", "libcxcore.1.dylib"]:
        try:
            os.remove(lib)
        except:
            pass

        os.symlink("../../../opencv/%s" % lib, lib)

    os.chdir(saveDir)
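
To confirm that the rewritten install names took effect, the patched dylib can be inspected with otool -L, the standard macOS tool for listing a binary's linked libraries. A minimal sketch, assuming utils.runCommand accepts an argument list as in the snippet above; verifyLibs is a hypothetical helper, not part of the original script:

def verifyLibs(libdir):
    """Print the load commands of the patched dylib; the /tmp/external.buildaccount
    paths should no longer appear after install_name_tool has run."""
    pipelineDir = os.path.join(
        libdir,
        "python%s/site-packages/vitamind/analyticsLib/pipelineElements" % pythonVersion)
    utils.runCommand(["otool", "-L", os.path.join(pipelineDir, "libVitaCFilters.dylib")])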
Example #2
 def zipFasta(self):
     """
     Compress the fasta file
     """
     utils.log("zipping {} ...".format(self.fastaFileName))
     cmd = "bgzip -f {}".format(self.fastaFileName)
     utils.runCommand(cmd)
Example #4
 def _downloadFasta(self, chromosome):
     accession = self.accessions[chromosome]
     fileName = '{}.fa'.format(chromosome)
     minPos = 0
     if self.excludeReferenceMin:
         minPos = self.chromMinMax.getMinPos(chromosome)
     maxPos = self.chromMinMax.getMaxPos(chromosome)
     with open(fileName, "w") as outFasta:
         print(">{}".format(chromosome), file=outFasta)
         sequence = _fetchSequence(accession, minPos, maxPos)
         for line in sequence:
             print(line, file=outFasta)
     utils.log("Compressing {}".format(fileName))
     utils.runCommand("bgzip -f {}".format(fileName))
     compressedFileName = fileName + '.gz'
     utils.log("Indexing {}".format(compressedFileName))
     utils.runCommand("samtools faidx {}".format(compressedFileName))
     # Assemble the metadata.
     metadata = {
         "md5checksum": getReferenceChecksum(compressedFileName),
         "sourceUri": None,
         "ncbiTaxonId": 9606,
         "isDerived": False,
         "sourceDivergence": None,
         "sourceAccessions": [accession + ".subset"],
     }
     metadataFilename = "{}.json".format(chromosome)
     dumpDictToFileAsJson(metadata, metadataFilename)
Example #6
    def postProcess(self, videos, videoFile):

        if len(videos) == 0:
            return

        # Merge all videos
        configPath = os.path.join(OutputPath.DATA_OUTPUT_PATH, 'video.txt')

        with open(configPath, 'w') as fp:
            for video in videos:
                fp.write('file \'{}\'\n'.format(video))
                fp.write('file \'{}\'\n'.format(self.separatorPath))
                fp.write('file \'{}\'\n'.format(self.separatorPath))

        videoPath = os.path.join(OutputPath.DATA_OUTPUT_PATH, 'all.mp4')

        print('Merge all to', videoPath, 'from', configPath)
        cmd = 'ffmpeg -y -f concat -safe 0 -i {} -c copy {}'.format(configPath, videoPath)

        runCommand(cmd)

        # Add logo
        self.videoPath = videoFile

        if self.logo:

            print('Add logo to', self.videoPath)
            cmd = 'ffmpeg -y -i {} -i {} -max_muxing_queue_size 10240 -filter_complex "overlay=10:10" {}'.format(videoPath,
                    self.logo, self.videoPath)
        else:
            print('Rename', videoPath, 'to', self.videoPath)
            cmd = 'mv {} {}'.format(videoPath, self.videoPath)

        runCommand(cmd)
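
If the concatenation needs a sanity check, ffprobe (shipped with ffmpeg) can report the merged file's duration before the logo pass. A small sketch, assuming the same runCommand helper and OutputPath constant used above:

# Print the duration, in seconds, of the merged video (illustrative check only).
videoPath = os.path.join(OutputPath.DATA_OUTPUT_PATH, 'all.mp4')
cmd = 'ffprobe -v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 {}'.format(videoPath)
runCommand(cmd)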
Example #7
def align2sam(command, reference, fastq_file, sai_fastq_file, fastq_metadata, output_dir):
    """
    Convert alignments to SAM format. Turn bwa sai alignments into a sam file.
    It uses the bwa samse command line.
    """
    (path, name, ext) = splitPath(sai_fastq_file)
    if ext != '.sai':
        sys.exit('align2Sam: alignment file %s does not have .sai extension' % sai_fastq_file)

    sam_file = os.path.join(output_dir, os.path.splitext(os.path.basename(fastq_file))[0]) +  '.sam'
    sample =  fastq_metadata[os.path.basename(fastq_file)]['sample']
    run_id =  fastq_metadata[os.path.basename(fastq_file)]['run_id']
    lane =   fastq_metadata[os.path.basename(fastq_file)]['lane']
    identifier =  fastq_metadata[os.path.basename(fastq_file)]['identifier']
    readgroup_metadata = {'PL': 'ILLUMINA', 'SM': sample,
                            'LB': '%s_%s_%s_Lane%s' % (identifier, sample, run_id, lane),
                            'ID':  '%s_%s_%s_Lane%s' % (identifier, sample, run_id, lane) }
    metadata_str = make_metadata_string(readgroup_metadata)

    command =  command % {'out': sam_file, 'ref': reference, 'align': sai_fastq_file,
                                      'seq': fastq_file, 'meta': metadata_str}

    runCommand('bwa samse alignment from fastq: %s' % sample, command)

    return sam_file
Example #8
    def createSlider(self):

        self.imagePath = os.path.join(self.path, 'image.mp4')

        if self.imageCount == 0:
            print('Create slider to', self.imagePath, 'from', self.background)

            # TODO: Use background as image
            cmd = 'ffmpeg -y -loop 1 -i {} -c:v libx264 -t {:.2f} -pix_fmt yuv420p {}'.format(self.background,
                    self.length, self.imagePath)

            runCommand(cmd)

            return

        duration = self.length / self.imageCount
        videoMaker = None

        for index in range(self.imageCount):

            imagePath = os.path.join(self.path, '{}.jpg'.format(index))

            if not os.path.exists(imagePath):
                continue

            imageVideoPath = '{}.mp4'.format(imagePath)
            VideoKit.createLoopVideo(imageVideoPath, imagePath, duration)

            videoMaker = VideoKit.appendVideo(imageVideoPath, videoMaker)

        videoMaker.merge(self.imagePath)
Example #9
def fastqc(command, sequences, fastq_metadata, output_dir):
    '''
    Run FastQC on each fastq file.
    '''
    for fastq_file in sequences:
        # substitute into a fresh name so each fastq file gets its own command
        fastqc_command = command % {'outdir': output_dir, 'seq': fastq_file}
        runCommand('Checking fastq quality', fastqc_command)
Example #10
def align_with_mem(command, threads, reference, fastq_file, pair_file, fastq_metadata, output_dir):
    '''
    Perform alignment on two paired-end fastq files to a reference genome to produce a sam file.
    '''
    (path, name, ext) = splitPath(fastq_file)
    (pathP, nameP, extP) = splitPath(pair_file)

    if ext != '.fastq' or extP != '.fastq':
        sys.exit('align: one of the fastq files %s or %s does not have .fastq extension' % (fastq_file, pair_file))

    sam_file = os.path.join(output_dir, os.path.splitext(os.path.basename(fastq_file))[0]) +  '.sam'
    sample =  fastq_metadata[os.path.basename(fastq_file)]['sample']
    run_id =  fastq_metadata[os.path.basename(fastq_file)]['run_id']
    lane =   fastq_metadata[os.path.basename(fastq_file)]['lane']
    identifier =  fastq_metadata[os.path.basename(fastq_file)]['identifier']
    readgroup_metadata = {'PL': 'ILLUMINA', 'SM': sample,
                            'LB': '%s_%s_%s_Lane%s' % (identifier, sample, run_id, lane),
                            'ID':  '%s_%s_%s_Lane%s' % (identifier, sample, run_id, lane) }
    metadata_str = make_metadata_string(readgroup_metadata)

    command = command % {'threads': threads, 'meta': metadata_str, 'ref': reference,
                                'seq': fastq_file , 'pair': pair_file, 'out': sam_file}
    runCommand('bwa mem alignment from fastq: %s' % sample, command)

    return sam_file
Example #12
def create_executable(trunk_dir, script_dir, work_dir, target):
    # Go to target dir and verify there is no executable yet
    os.chdir(work_dir)
    if os.path.exists('VisionToolkit.exe'):
        os.remove('VisionToolkit.exe')

    nsis = os.path.join(trunk_dir,
                        'external/win32/lib/buildtools/NSIS/makensis.exe')

    # Copy the NSIS script to work_dir because that's where it will execute
    shutil.copy(os.path.join(script_dir, 'vision_toolkit.nsi'),
                'vision_toolkit.nsi')
    assert os.path.isfile(nsis)

    # Build the NSIS command line
    cmd = [nsis, 'vision_toolkit.nsi']
    #print ' '.join(cmd)

    # Launch NSIS and verify that the final executable has been created
    #subprocess.call(cmd)
    import utils
    import logging
    # log level was earlier set to info. We want all output from this command
    logging.getLogger('').setLevel(logging.DEBUG)
    utils.runCommand(cmd)
    assert os.path.isfile('VisionToolkit.exe')

    # Rename to target name
    try:
        shutil.move('VisionToolkit.exe', target)
    except Exception, e:
        print e
        raise
Example #13
def build_win32(srcdir, builddir, installdir, assertions, customerRelease):


  log.info("build_win32: srcdir = '%s'", srcdir)
  log.info("build_win32: builddir = '%s'", builddir)
  log.info("build_win32: installdir = '%s'", installdir)
  # deprecated
  os.environ["NTA"] = installdir
  # These are what I would like to use. Currently only used by the test project
  os.environ["NTAX_INSTALL_DIR"] = installdir
  os.environ["NTAX_BUILD_DIR"] = builddir

  log.debug("build_win32: srcdir: '%s'", srcdir)
  log.debug("build_win32: installdir: '%s'", installdir)
  log.debug("build_win32: builddir: '%s'", builddir)

  # how to quote "Release|Win32" so that it doesn't cause an error?
  # command = ["vcbuild", "/logcommands", "/showenv", "/time", os.path.join(srcdir, "trunk.sln")]
  utils.changeDir(srcdir)
  if customerRelease:
    import glob
    solutionFiles = glob.glob("*.sln")
    if len(solutionFiles) == 0:
      raise Exception("Unable to find any solution files in customer source release")
    elif len(solutionFiles) > 1:
      raise Exception("More than one solution file found in customer source release: %s" % solutionFiles)
    command = 'vcbuild /logcommands /showenv /time %s "Release|Win32"' % solutionFiles[0]
  else:
    command = 'vcbuild /logcommands /showenv /time trunk.sln "Release|Win32"'
  utils.runCommand(command)

  postbuild_win32(srcdir, installdir)
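
One way to sidestep the quoting question raised in the comment above is to pass the command as an argument list, so the pipe in "Release|Win32" never reaches a shell. A sketch, assuming utils.runCommand accepts a list the way it does in other examples on this page:

# No shell parsing is involved, so "Release|Win32" needs no extra quoting.
command = ["vcbuild", "/logcommands", "/showenv", "/time",
           os.path.join(srcdir, "trunk.sln"), "Release|Win32"]
utils.runCommand(command)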
Example #14
 def extract(self, runLogDir):
     if os.path.exists(self.extract_dir):
         shutil.rmtree(self.extract_dir)
     os.makedirs(self.extract_dir)
     utils.runCommand(
         'tar xvf %s -C %s --strip-components=1 > %s/extract%s.log.txt 2>&1'
         % (self.tgz, self.extract_dir, runLogDir, self.name))
Example #15
def run(configFile, name):

    OutputPath.init(configFile)

    thread = ThreadWritableObject(configFile, name)
    thread.start()

    sys.stdout = thread
    sys.errout = thread # XXX: Actually, it does NOT work

    try:

        db = Database(configFile, 'specials')
        db.initialize()

        evaluation = Evaluation(configFile, db)

        evaluation.updateOverdue()

        path = OutputPath.getSharePath()
        sharePath = getProperty(configFile, 'output-share-file')

        cmd = '/bin/rm -f {1} && /bin/ln -s {0} {1}'.format(path, sharePath)
        runCommand(cmd)

        data = evaluation.output()

        with open(path, 'w') as fp:
            fp.write(reprDict(data))

    except KeyboardInterrupt:
        pass
    except Exception, e:
        print 'Error occurs at', datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        traceback.print_exc(file=sys.stdout)
Example #16
def createMovie(encoder='ffmpeg'):
    """Create a movie from a saved sequence of images.

    encoder is one of: 'ffmpeg, mencoder, convert'
    """
    if not multisave:
        pf.warning('You need to start multisave mode first!')
        return

    names,format,quality,window,border,hotkey,autosave,rootcrop = multisave
    glob = names.glob()
    ## if glob.split('.')[-1] != 'jpg':
    ##     pf.warning("Currently you need to save in 'jpg' format to create movies")
    ##     return

    if encoder == 'convert':
        cmd = "convert -delay 1 -colors 256 %s output.gif" % names.glob()
    elif encoder == 'mencoder':
        cmd = "mencoder -ovc lavc -fps 5 -o output.avi %s" % names.glob()
    elif encoder == 'mencoder1':
        cmd = "mencoder \"mf://%s\" -mf fps=10 -o output1.avi -ovc lavc -lavcopts vcodec=msmpeg4v2:vbitrate=800" % names.glob()
    else:
        cmd = "ffmpeg -qscale 1 -r 1 -i %s output.mp4" % names.glob()
    pf.debug(cmd)
    utils.runCommand(cmd)
Example #17
def createVrtForAOI(fvrt, year, area):
    verbose = CONFIG.get('verbose', False)

    # temp file to write doqqs into
    fvrtin = fvrt + '.in'

    # temp file for intermediate vrt
    fvrtvrt = fvrt + '.vrt'

    # get a list of doqqs that intersect our area of interest
    # and write them to a temp file
    files = getDoqqsForArea(year, area)
    fh = open(fvrtin, 'wb')
    for f in files:
        fh.write(f + "\n")
    fh.close()

    # create the intermediate vrt file
    cmd = ['gdalbuildvrt', '-input_file_list', fvrtin, fvrtvrt]
    runCommand(cmd, verbose)

    # create the final vrt file with appropriate mask band defined
    cmd = [
        'gdal_translate', '-b', '1', '-b', '2', '-b', '3', '-b', '5', '-mask',
        '4', '-of', 'VRT', fvrtvrt, fvrt
    ]
    runCommand(cmd, verbose)
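
To verify the band and mask layout of the finished VRT, gdalinfo can be run through the same helper. A short sketch, assuming the runCommand(cmd, verbose) signature used above:

# Report the bands and mask flags of the final VRT (illustrative check only).
cmd = ['gdalinfo', fvrt]
runCommand(cmd, verbose)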
Example #18
def create_executable(trunk_dir, script_dir, work_dir, target):
  # Go to target dir and verify there is no executable yet
  os.chdir(work_dir)
  if os.path.exists('VisionDemo.exe'):
    os.remove('VisionDemo.exe')

  nsis = os.path.join(trunk_dir,
                      'external/win32/lib/buildtools/NSIS/makensis.exe')

  # Copy the NSIS script to work_dir because that's where it will execute
  shutil.copy(os.path.join(script_dir, 'demo.nsi'), 'demo.nsi')
  assert os.path.isfile(nsis)

  # Build the NSIS command line
  cmd = [nsis, 'demo.nsi']
  #print ' '.join(cmd)

  # Launch NSIS and verify that the final executable has been created
  #subprocess.call(cmd)
  import utils
  import logging
  # log level was earlier set to info. We want all output from this command
  logging.getLogger('').setLevel(logging.DEBUG)
  utils.runCommand(cmd)
  assert os.path.isfile('VisionDemo.exe')

  # Rename to target name
  try:
    shutil.move('VisionDemo.exe', target)
  except Exception, e:
    print e
    print
Example #19
 def setLogLevel(self, level):
     """Sets the module log level."""
     if level != Defaults.vdoLogLevel:
         runCommand(string.split("echo " + level + " > /sys/" + self._name +
                                 "/log_level"),
                    shell=True,
                    noThrow=True)
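
For comparison, the same sysfs write can be done without spawning a shell at all. A sketch of the idea, not the module's actual implementation:

def setLogLevelDirect(self, level):
    """Write the log level straight into the sysfs attribute (illustrative sketch)."""
    if level != Defaults.vdoLogLevel:
        try:
            with open("/sys/" + self._name + "/log_level", "w") as f:
                f.write(level)
        except EnvironmentError:
            pass  # mirror the noThrow=True behaviour of the original call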
Example #20
  def execute(self, args):
    #pylint: disable=R0201
    conf = Configuration(self.confFile, readonly=False)
    if not args.name:
      args.all = True

    try:
      print(_("VDO status:"))
      print(_("  Node: ")
            + runCommand(['uname', '-n'], noThrow=True, strip=True))
      print(_("  Date: ")
            + runCommand(['date', '--rfc-3339=seconds'], noThrow=True,
                         strip=True))
      if os.getuid() != 0:
        print(_("  Note: not running as root,"
                + " some status may be unavailable"))
      print(os.linesep.join(VDOKernelModuleService().status("")))

      print(os.linesep.join(conf.status("")))

      print(_("VDOs: "))
      for vdo in self.getVdoServices(args, conf):
        print(os.linesep.join(vdo.status("  ")))
      sys.stdout.flush()
      sys.stderr.flush()
    except IOError as ex:
      self.log.debug("exception ignored: {0}".format(ex))
Example #21
File: mpc.py Project: avida/webguy
def runPlayer(path):
    #runCommand("export DISPLAY=:0;sudo -u dima vlc -f \"%s\" &" % path)
    #runCommand("sudo -u dima totem --fullscreen --display=:0 \"%s\"" % path)
    #runCommand("export DISPLAY=:0; sudo -u dima mplayer --fs \"%s\"" % path)
    #runCommand(r'"c:\Program Files (x86)\vlc-2.2.0\vlc.exe" -f "%s"' % path)
    runCommand(
        r'"c:\Program Files (x86)\K-Lite Codec Pack\MPC-HC64\mpc-hc64.exe" /fullscreen "%s"' % path)
Example #22
def cleanNoPysvn(dir,  doCleanup=True):
  if doCleanup == False:
    utils.runCommand("svn status --no-ignore %s" % dir)
  else:
    # Delete everything svn doesn't know about *except* for the top level directory, since svn can
    # report the top level directory as "!" (missing) if a sub-directory is missing.
    # Use the xml format to avoid problems with spaces in filenames
    utils.runCommand("svn status --no-ignore --xml %s | grep -A1 entry | grep path= | awk -F= '{print $2}' | sed 's/>//' | grep -v \\\"%s\\\" | xargs rm -rvf" % (dir, dir), logOutputOnlyOnFailure=False)
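
The grep/awk pipeline above re-parses svn's XML output with line tools, which gets fragile when paths contain unusual characters. A sketch of the same clean-up with a real XML parser; it assumes the entry/wc-status layout of svn status --xml output, captures the output with subprocess.check_output instead of the page's runCommand helper, and removes paths with shutil/os (cleanNotInSvn is hypothetical):

import os
import shutil
import subprocess
import xml.etree.ElementTree as ET

def cleanNotInSvn(dir):
    """Delete unversioned and ignored paths reported by svn status, keeping dir itself."""
    out = subprocess.check_output(["svn", "status", "--no-ignore", "--xml", dir])
    for entry in ET.fromstring(out).iter("entry"):
        status = entry.find("wc-status")
        if status is None or status.get("item") not in ("unversioned", "ignored"):
            continue
        path = entry.get("path")
        if os.path.abspath(path) == os.path.abspath(dir):
            continue
        if os.path.isdir(path) and not os.path.islink(path):
            shutil.rmtree(path, ignore_errors=True)
        else:
            try:
                os.remove(path)
            except OSError:
                pass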
Example #23
    def merge(dstVideoPath, srcVideoPath, srcAudioPath):

        print('Merge', srcVideoPath, 'and', srcAudioPath, 'to', dstVideoPath)

        cmd = 'ffmpeg -y -i {} -i {} -c copy -map \'0:v:0\' -map \'1:a:0\' {}'.format(
            srcVideoPath, srcAudioPath, dstVideoPath)

        runCommand(cmd)
Example #24
 def indexFasta(self):
     """
     Create index on the fasta file
     """
     zipFileName = "{}.gz".format(self.fastaFileName)
     utils.log("indexing {} ...".format(zipFileName))
     cmd = "samtools faidx {}".format(zipFileName)
     utils.runCommand(cmd)
Example #26
def compressSplits(splitFileNames):
    compressedFileNames = []
    for splitFileName in splitFileNames:
        utils.log("Compressing {}".format(splitFileName))
        cmd = "bgzip {}".format(splitFileName)
        utils.runCommand(cmd)
        compressedFileName = "{}.gz".format(splitFileName)
        compressedFileNames.append(compressedFileName)
    return compressedFileNames
Example #28
def decompressFasta(args):
    fastaFileName = args.fastaFile
    filename, extension = os.path.splitext(fastaFileName)
    if extension == '.gz':
        utils.log("Decompressing {}".format(fastaFileName))
        cmd = "gunzip {}".format(fastaFileName)
        utils.runCommand(cmd)
        fastaFileName = filename
    return fastaFileName
Example #30
def ZipDir(zipFile, directory):
  if HOST_OS == 'win':
    cmd = os.path.normpath(os.path.join(
        os.path.dirname(__file__),
        '../../../third_party/lzma_sdk/Executable/7za.exe'))
    options = ['a', '-r', '-tzip']
  else:
    cmd = 'zip'
    options = ['-yr']
  utils.runCommand([cmd] + options + [zipFile, directory])
Example #31
def applyRules(rule):
    print 'applying rule %s' % rule
    if runCommand(rule):
        print 'command <%s> execute failed' % rule
        return False
    rule = 'iptables-save > /etc/sysconfig/iptables'
    if runCommand(rule):
        print 'command <%s> execute failed' % rule
        return False
    return True
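
A hypothetical call, for illustration only; the rule string is made up, and runCommand is assumed to return a falsy value on success, as the snippet implies:

if not applyRules('iptables -A INPUT -p tcp --dport 22 -j ACCEPT'):
    print 'failed to apply and persist the rule'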
Example #32
    def run_protocol(self):
        #print("data",self.alignerList[0])
        #if not os.path.exists("indexes"):
        #	os.mkdir("indexes")
        #os.chdir("indexes")

        #for genome in self.inputfile:

        for aligner in self.alignersList:

            #if os.path.exists(dir):
            #	continue
            #os.mkdir(dir)
            #os.chdir(dir)
            if aligner.upper() == "HISAT":
                objhisat = hisat(inputfile=self.inputfile,
                                 workingpath="aligners/HISAT")
                if (self.parameters[0]['build_index']):
                    runCommand(
                        objhisat.build_command
                    )  #go to the specific directory and run the build command

                    print("Building index file completed.")
                    deleteFile(os.path.join(
                        "/workingpath/",
                        "abc"))  # delete the input file to save space
                #print("if 1",aligner.upper())

                #print(cmd)

            elif aligner.upper() == "HISAT2":
                objhisat2 = hisat2(inputfile=self.inputfile,
                                   workingpath="aligners/HISAT2")
                if (self.parameters[1]['build_index']):
                    runCommand(objhisat2.build_command)

                    print("Building index file completed.")
                    deleteFile(os.path.join(
                        "/workingpath/",
                        "abc"))  # delete the input file to save space

            elif aligner == "BOWTIE":
                cmd = "bowtie-build ../data/%s %s" % (self.inputfile,
                                                      self.inputfile)

            elif aligner == "STAR":
                cmd = "../../aligners/bin/STAR --runMode genomeGenerate --genomeDir . --genomeFastaFiles ../../data/%s.fa" % (
                    self.inputfile)

            elif aligner == "GSNAP":
                cmd = "../../aligners/bin/gmap_build -B ../../aligners/bin -D . -d %s ../../data/%s.fa" % (
                    self.inputfile, self.inputfile)
            else:
                print("Aligner:", aligner, " does not exist.")
                assert False
Example #33
def install_vitamind(target_dir):
    """Run the install_vitamind.py script

  Installs ffmpeg, scipy and vitamind obfuscated pipeline
  to the target dir
  """
    print 'install_vitamind() into', target_dir
    save_dir = os.getcwd()
    os.chdir(os.path.join(trunk_dir, 'external/src/python_modules/vitamind'))
    utils.runCommand(['python', 'install_vitamind.py', target_dir])
    os.chdir(save_dir)
Example #34
def installVitamind(trunkDir, installDir):
    """Run the installVitamind.py script

  Installs ffmpeg, scipy and vitamind obfuscated pipeline
  to the install dir
  """
    print 'installVitamind()'
    saveDir = os.getcwd()
    os.chdir(os.path.join(trunkDir, 'external/src/python_modules/vitamind'))
    utils.runCommand(['python', 'install_vitamind.py', "--force", installDir])
    os.chdir(saveDir)
Example #35
 def runTests(self):
     testCommands = self.parseTestCommands()
     for command in testCommands:
         expandedCommand = self.expandCommand(command)
         self.log('Running: "{0}"'.format(expandedCommand))
         try:
             utils.runCommand(expandedCommand)
         except subprocess.CalledProcessError:
             self.log('ERROR')
             return
     self.log('SUCCESS')
Example #38
    def createLoopVideo(dstVideoPath, srcImagePath, videoLength):

        print('Create video to', dstVideoPath, 'from', srcImagePath,
              'with length', videoLength)

        cmd = 'ffmpeg -y -loop 1 -i {} -c:v libx264 -t {:.2f} -pix_fmt yuv420p {}'.format(
            srcImagePath, videoLength, dstVideoPath)

        runCommand(cmd)

        return dstVideoPath
Example #39
 def running(self, wait=True):
   """Returns True if the module is loaded and DM target is available."""
   retries = 20 if wait else 1
   try:
     runCommand(string.split("lsmod | grep -q '" + self._name + "'"),
                shell=True, retries=retries)
     runCommand(string.split("dmsetup targets | grep -q " 
                             + Defaults.vdoTargetName),
                shell=True, retries=retries)
     return True
   except CommandError:
     return False
Example #40
 def _fsyncDirectory(self):
     """Open and issue an fsync on the directory containing the config file.
 """
     dirname = os.path.dirname(self.filepath)
     if Command.noRunMode():
         runCommand(['fsync', dirname])
         return
     fd = os.open(dirname, os.O_RDONLY)
     try:
         os.fsync(fd)
     finally:
         os.close(fd)
Example #41
def read_gambit_neutral(fn):
    """Read a triangular surface mesh in Gambit neutral format.

    The .neu file nodes are numbered from 1!
    Returns a nodes,elems tuple.
    """
    runCommand("%s/external/gambit-neu '%s'" % (GD.cfg['pyformexdir'],fn))
    nodesf = changeExt(fn,'.nodes')
    elemsf = changeExt(fn,'.elems')
    nodes = fromfile(nodesf,sep=' ',dtype=Float).reshape((-1,3))
    elems = fromfile(elemsf,sep=' ',dtype=int32).reshape((-1,3))
    return nodes, elems-1
Example #42
def annotate(command, annovar_dir, annovar_file, output_dir):
    '''
    Annotate vcf using Annovar variant caller.
    '''
    (path, name, ext) =  splitPath(annovar_file)
    if ext != '.avinput':
        sys.exit('Annotating vcf: vcf file %s does not have .avinput extension' % annovar_file)
    out_prefix = os.path.join(output_dir, name)
    command = command % {'out': out_prefix, 'annovarfile': annovar_file, 'annovardir': annovar_dir}
    runCommand('Annotating with Annovar', command)

    return out_prefix
Example #43
def convert2annovar(command, annovar_dir, vcf, output_dir):
    '''
    Convert a vcf file to the Annovar variant caller's .avinput format.
    '''
    (path, name, ext) =  splitPath(vcf)
    if ext != '.vcf':
        sys.exit('Converting to .annovar: vcf file %s does not have .vcf extension' % vcf)
    out_prefix = os.path.join(output_dir, name.split('.')[0])
    command = command % {'out': out_prefix, 'vcf': vcf, 'annovardir': annovar_dir}
    runCommand('Converting to .annovar format', command)

    return '.'.join([out_prefix, name.split('.')[0],  'avinput'])
Example #44
def make_reference_database(command, algorithm, reference):
    '''
    Create reference database
    '''
    ref_database = reference + '.bwt'
    if os.path.exists(ref_database):
        print('Reference database already exists: using %s' % ref_database)
    else:
        command = command % {'algorithm': algorithm, 'prefix': reference, 'seq': reference + '.fasta'}
        runCommand('Creating Reference Database', command)

    return ref_database
Example #45
def align(command, threads, reference, sequence, fastq_metadata, output_dir):
    '''
    Align sequence reads to the reference genome. This is bwa's first stage, bwa aln.
    '''
    (path, name, ext) = splitPath(sequence)
    if ext != '.fastq':
        sys.exit('align: sequence file %s does not have .fastq extension' % sequence)
    alignment_file = os.path.join(output_dir, name + '.sai')
    command = command % {'out': alignment_file, 'threads': int(threads), 'ref': reference,
                    'seq': sequence, 'encodingflag': ''}
    runCommand('Running Alignment', command)

    return alignment_file
Example #46
def read_gambit_neutral(fn):
    """Read a triangular surface mesh in Gambit neutral format.

    The .neu file nodes are numbered from 1!
    Returns a nodes,elems tuple.
    """
    scr = os.path.join(pf.cfg['bindir'], 'gambit-neu ')
    utils.runCommand("%s '%s'" % (scr, fn))
    nodesf = utils.changeExt(fn, '.nodes')
    elemsf = utils.changeExt(fn, '.elems')
    nodes = fromfile(nodesf, sep=' ', dtype=Float).reshape((-1, 3))
    elems = fromfile(elemsf, sep=' ', dtype=int32).reshape((-1, 3))
    return nodes, elems - 1
Example #47
def summarize(command, annovar_dir, annovar_file, ver1000g, veresp, verdbsnp, genetype, buildver, output_dir):
    '''
    Summarize information with Annovar.
    '''
    (path, name, ext) =  splitPath(annovar_file)
    if ext != '.avinput':
        sys.exit('Summarizing annotations: vcf file %s does not have .avinput extension' % annovar_file)
    out_prefix = os.path.join(output_dir, name)
    command = command % {'out': out_prefix, 'annovarfile': annovar_file, 'annovardir': annovar_dir,
                'ver1000g': ver1000g, 'veresp': veresp, 'verdbsnp': verdbsnp, 'genetype': genetype, 'buildver': buildver}
    runCommand('Summarizing with Annovar', command)

    return out_prefix
Example #49
def stl_to_off(stlname,offname=None,sanitize=True):
    """Transform an .stl file to .off format."""
    if not offname:
        offname = changeExt(stlname,'.off')
    if sanitize:
        options = ''
    else:
        # admesh always wants to perform some actions on the STL. The -c flag
        # to suppress all actions makes admesh hang. Therefore we include the
        # action -d (fix normal directions) as the default.
        options = '-d'    
    runCommand("admesh %s --write-off '%s' '%s'" % (options,offname,stlname))
    return offname
Example #50
def read_ascii_large(fn,dtype=Float):
    """Read an ascii .stl file into an [n,3,3] float array.

    This is an alternative for read_ascii, which is a lot faster on large
    STL models.
    It requires the 'awk' command though, so is probably only useful on
    Linux/UNIX. It works by first transforming  the input file to a
    .nodes file and then reading it through numpy's fromfile() function.
    """
    tmp = '%s.nodes' % fn
    runCommand("awk '/^[ ]*vertex[ ]+/{print $2,$3,$4}' '%s' | d2u > '%s'" % (fn,tmp))
    nodes = fromfile(tmp,sep=' ',dtype=dtype).reshape((-1,3,3))
    return nodes
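
The awk dependency makes this Linux/UNIX-only, as the docstring notes. A portable sketch of the same parsing in pure Python plus numpy, assuming the usual ASCII STL layout of 'vertex x y z' lines; read_ascii_large_py is hypothetical, and dtype=Float assumes the same Float name is in scope as in the original:

import numpy as np

def read_ascii_large_py(fn, dtype=Float):
    """Collect the vertex lines of an ASCII .stl file into an [n,3,3] float array."""
    coords = []
    with open(fn) as f:
        for line in f:
            parts = line.split()
            if parts and parts[0] == 'vertex':
                coords.append([float(v) for v in parts[1:4]])
    return np.asarray(coords, dtype=dtype).reshape((-1, 3, 3))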
Example #51
    def execute(self, args):
        #pylint: disable=R0201
        conf = Configuration(self.confFile, readonly=False)
        if not args.name:
            args.all = True

        try:
            # To be consistent with previous output we must present each section as
            # its own rather than organizing them into one structure to dump.
            # Also, we gather all the info before printing it out to avoid
            # interspersing command info when run in verbose mode.
            values = {}
            vdoStatus = {_("VDO status"): values}
            values[_("Node")] = runCommand(['uname', '-n'],
                                           noThrow=True,
                                           strip=True)
            values[_("Date")] = runCommand(['date', '--rfc-3339=seconds'],
                                           noThrow=True,
                                           strip=True)
            if os.getuid() != 0:
                values[_("Note")] = _("Not running as root," +
                                      " some status may be unavailable")

            kernelStatus = {
                _("Kernel module"): VDOKernelModuleService().status()
            }

            confStatus = {_("Configuration"): conf.status()}

            vdos = {}
            perVdoStatus = {_("VDOs"): vdos}
            for vdo in self.getVdoServices(args, conf):
                import time
                start_time = time.time()
                try:
                    vdos[vdo.getName()] = vdo.status()
                except VDOServiceError as ex:
                    vdos[vdo.getName()] = str(ex)
                print "time use: {}'s".format(time.time() - start_time)

            # YAML adds a newline at the end.  To maintain consistency with the
            # previous output we need to eliminate that.
            # print(yaml.dump(vdoStatus, default_flow_style = False)[:-1])
            # print(yaml.dump(kernelStatus, default_flow_style = False)[:-1])
            # print(yaml.dump(confStatus, default_flow_style = False)[:-1])
            # print(yaml.dump(perVdoStatus, default_flow_style = False)[:-1])

            sys.stdout.flush()
            sys.stderr.flush()
        except IOError as ex:
            self.log.debug("exception ignored: {0}".format(ex))
Example #52
def filter_snps(command, command_options, gatk_dir, reference, vcf, filter_expression, output_dir):
    '''
    Use GATK VariantFiltration to filter raw SNP calls.
    '''
    (path, name, ext) =  splitPath(vcf)
    command_options = command_options[1]
    if ext != '.vcf':
        sys.exit('filtering SNPs: vcf file %s does not have .vcf extension' % vcf)
    out_vcf = os.path.join(output_dir, name + '.filtered.vcf')
    command = command % {'jvmoptions': command_options, 'out': out_vcf,
                    'vcf': vcf, 'gatkdir': gatk_dir, 'ref': reference + '.fasta', 'expression': filter_expression}
    runCommand('Filtering SNPs', command)

    return out_vcf
Example #53
def base_qual_recal_tabulate(command, command_options, gatk_dir, reference, recal_file, alignment, output_dir):
    '''
    GATK TableRecalibration: recalibrate base quality scores using the output of CountCovariates.
    '''
    (path, name, ext) =  splitPath(alignment)
    command_options = command_options[1]
    if ext != '.bam':
        sys.exit('table recalibration: alignment file %s does not have .bam extension' % alignment)
    recal_bam = os.path.join(output_dir, name + '.recal.bam')
    command = command % {'jvmoptions': command_options, 'out': recal_bam, 'recalfile': recal_file,
                            'bam': alignment, 'gatkdir': gatk_dir, 'ref': reference + '.fasta'}
    runCommand('recalibrate base quality scores', command)

    return recal_bam
Example #54
def read_gambit_neutral_hex(fn):
    """Read a hexahedral mesh in Gambit neutral format.

    The .neu file nodes are numbered from 1!
    Returns a nodes,elems tuple.
    """
    scr = os.path.join(pf.cfg['bindir'], 'gambit-neu-hex ')
    pf.message("%s '%s'" % (scr, fn))
    utils.runCommand("%s '%s'" % (scr, fn))
    nodesf = utils.changeExt(fn, '.nodes')
    elemsf = utils.changeExt(fn, '.elems')
    nodes = fromfile(nodesf, sep=' ', dtype=Float).reshape((-1, 3))
    elems = fromfile(elemsf, sep=' ', dtype=int32).reshape((-1, 8))
    elems = elems[:, (0, 1, 3, 2, 4, 5, 7, 6)]
    return nodes, elems - 1
Example #55
    def voiceOut(self, stringContent):

        result = self.aipSpeech.synthesis(stringContent, 'zh', 1, {
            'vol': 15,
            'per': 0
        })

        if not isinstance(result, dict):

            path = OutputPath.getDataPath('voice', 'wav')
            with open(path, 'wb') as fp:
                fp.write(result)

            cmd = 'mplayer {}'.format(path)
            runCommand(cmd)
Example #56
def remesh(self, edgelen=None):
    """Remesh a TriSurface.

    edgelen is the suggested edge length
    """
    if edgelen is None:
        self.getElemEdges()
        E = Mesh(self.coords, self.edges, eltype='line2')
        edgelen = E.lengths().mean()
    tmp = utils.tempFile(suffix='.stl').name
    tmp1 = utils.tempFile(suffix='.stl').name
    pf.message("Writing temp file %s" % tmp)
    self.write(tmp, 'stl')
    pf.message("Remeshing using VMTK (edge length = %s)" % edgelen)
    cmd = "vmtk vmtksurfaceremeshing -ifile %s -ofile %s -edgelength %s" % (
        tmp, tmp1, edgelen)
    sta, out = utils.runCommand(cmd)
    os.remove(tmp)
    if sta:
        pf.message("An error occurred during the remeshing.")
        pf.message(out)
        return None
    S = TriSurface.read(tmp1)
    os.remove(tmp1)
    return S