Code Example #1
def main(argv):
    # parse input arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--input',
                        help="the input YAML configuration file")
    parser.add_argument('-s', '--serial',
                        help="run in serial mode",
                        action="store_true")
    args = parser.parse_args()

    batchStart = time.time()

    Logger.logPrint("Starting batchprocess: "+args.input)
    Logger.logPrint("Start time:"+timeStr(batchStart))

    # load the WormVideo objects from the YAML configuration file
    with open(args.input, 'r') as f:
        wvs = wtc.loadWormVideos(f)

    # run analysis on regions
    if args.serial:
        for wv in wvs:
            wv.processRegions()
    else:
        wtp.batchProcessVideos(wvs)
    batchStop = time.time()
    Logger.logPrint("Start time:"+timeStr(batchStart))
    Logger.logPrint("End time  :"+timeStr(batchStop))
    Logger.logPrint("Total time:"+timeStr(batchStop-batchStart))

    return 'Success'
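
A minimal sketch of the entry point this main function expects, assuming the excerpt lives in a script called batchprocess.py (a hypothetical name) whose module-level imports (argparse, time, Logger, timeStr, wtc, wtp) are not shown in these excerpts:

import sys

if __name__ == '__main__':
    # main() returns the string 'Success'; map it to a conventional exit code
    sys.exit(0 if main(sys.argv[1:]) == 'Success' else 1)

# Example invocations (parallel is the default; -s/--serial forces serial mode):
#   python batchprocess.py -i experiment.yml
#   python batchprocess.py -i experiment.yml --serial
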
Code Example #2
File: parallel.py Project: stephenhelms/WormTracker
def parallelProcessRegions(wormVideo):
    wormVideo.saveConfiguration()
    pool = multiprocessing.Pool()
    result = pool.map_async(processRegion, wormVideo.regions)
    Logger.logPrint(','.join([str(r) for r in result.get()]))
    pool.close()
    pool.join()
    Logger.logPrint('Finished analyzing all regions')
    cleanUpProcess(wormVideo)
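
parallelProcessRegions relies on the fact that the AsyncResult returned by Pool.map_async blocks in get() until every task has finished, which is why the results can be logged before close() and join(). A self-contained sketch of that pattern, with square standing in for processRegion:

import multiprocessing

def square(x):
    # placeholder worker; any picklable module-level function works
    return x * x

if __name__ == '__main__':
    pool = multiprocessing.Pool()
    result = pool.map_async(square, range(4))
    print(result.get())   # blocks until all tasks finish -> [0, 1, 4, 9]
    pool.close()
    pool.join()
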
Code Example #3
File: parallel.py Project: stephenhelms/WormTracker
def batchProcessVideos(wormVideos):
    pool = multiprocessing.Pool()
    results = []
    for video in wormVideos:
        video.saveConfiguration()
        results.append(pool.map_async(processRegion, video.regions))

    for result in results:
        for regionResult in result.get():
            Logger.logPrint(regionResult)
    pool.close()
    pool.join()
    Logger.logPrint('Finished analyzing all regions')
    for video in wormVideos:
        cleanUpProcess(video)
Code Example #4
File: parallel.py Project: stephenhelms/WormTracker
def processRegion(region):
    Logger.logPrint('Starting analysis of {0} {1}'.format(
        region.strainName, region.wormName))
    try:
        # split output to a different file
        path, name = os.path.split(region.resultsStoreFile)
        newName = (region.strainName + '_' + region.wormName + '_' + name)
        region.resultsStoreFile = os.path.join(path, newName)
        tStart = time.clock()
        region.saveConfiguration()
        region.process()
        tFinish = time.clock()
        tDuration = (tFinish - tStart) / 60
        Logger.logPrint('Analysis of {0} {1} took {2} min.'.format(
            region.strainName, region.wormName, str(tDuration)))
        return 'Success'
    except Exception as e:
        Logger.logPrint('Error during analysis of {0} {1}: {2}'.format(
            region.strainName, region.wormName, str(e)))
        return 'Failed'
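
Before processing, processRegion redirects each region's output to its own HDF5 store by prefixing the strain and worm names to the shared results file name; cleanUpProcess later merges these per-worm files back together. A small illustration of the renaming, using hypothetical values:

import os

resultsStoreFile = '/data/experiment_results.h5'   # hypothetical path
strainName, wormName = 'N2', 'worm1'               # hypothetical names

path, name = os.path.split(resultsStoreFile)
newName = strainName + '_' + wormName + '_' + name
print(os.path.join(path, newName))   # /data/N2_worm1_experiment_results.h5
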
Code Example #5
File: parallel.py Project: stephenhelms/WormTracker
def cleanUpProcess(wormVideo):
    # merge results into original output file
    # should work
    path, name = os.path.split(wormVideo.storeFile)
    outputFile = os.path.join(path, 'merge_' + name)

    try:
        # copy video info
        Logger.logPrint('Merging HDF5 output files...')
        obj = '/video'

        # Linux: build the command as a list of separate arguments; do not escape the argument values:

        # have to copy because the merge will fail if there are any duplicates
        cmd = [
            hdf5path + 'h5copy', '-i',
            os.path.join(path, name), '-o', outputFile, '-s', obj, '-d', obj,
            '-p'
        ]

        Logger.logPrint('Executing:' + ' '.join(cmd))
        Logger.logPrint(check_output(cmd))

        # remove premerge file
        os.remove(wormVideo.storeFile)

        for region in wormVideo.regions:
            try:
                args = [
                    hdf5path + 'h5copy', '-i',
                    os.path.join(path, '{1}_{2}_{0}'), '-o',
                    os.path.join(path, 'merge_{0}'), '-s', '/worms/{1}/{2}',
                    '-d', '/worms/{1}/{2}', '-p'
                ]
                # substitute the file name, strain, and worm into the command template
                cmd = [
                    arg.format(name, region.strainName, region.wormName)
                    for arg in args
                ]

                Logger.logPrint('Executing:' + ' '.join(cmd))
                Logger.logPrint('Output:' + check_output(cmd, stderr=STDOUT))
                # remove premerge file
                os.remove(
                    os.path.join(path,
                                 '{1}_{2}_{0}').format(name, region.strainName,
                                                       region.wormName))
            except Exception as e:
                Logger.logPrint('Error cleaning up:')
                Logger.logPrint('Exception:' + str(e))

        # rename merge file
        os.rename(outputFile, wormVideo.storeFile)
    except Exception as e:
        Logger.logPrint('Error cleaning up:')
        Logger.logPrint('Exception:' + str(e))
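
The per-region h5copy commands above are built from format templates: each element of args may contain {0} (results file name), {1} (strain) and {2} (worm), and the list comprehension fills in the concrete values. A sketch of the expansion with hypothetical values, assuming h5copy is on the PATH (i.e. hdf5path is an empty string):

import os

hdf5path = ''                                     # assume h5copy is on the PATH
path, name = '/data', 'experiment_results.h5'     # hypothetical values
strainName, wormName = 'N2', 'worm1'

args = [hdf5path + 'h5copy', '-i', os.path.join(path, '{1}_{2}_{0}'),
        '-o', os.path.join(path, 'merge_{0}'), '-s', '/worms/{1}/{2}',
        '-d', '/worms/{1}/{2}', '-p']
cmd = [arg.format(name, strainName, wormName) for arg in args]
print(' '.join(cmd))
# h5copy -i /data/N2_worm1_experiment_results.h5 -o /data/merge_experiment_results.h5
#   -s /worms/N2/worm1 -d /worms/N2/worm1 -p
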
Code Example #6
File: parallel.py Project: stephenhelms/WormTracker
def parallelPostProcessRegions(storeFile):
    path, name = os.path.split(storeFile)
    outputFile = os.path.join(path, 'merge_' + name)

    try:
        # copy video info
        Logger.logPrint('Splitting HDF5 output files...')
        obj = '/video'

        # Linux: build the command as a list of separate arguments; do not escape the argument values:

        # have to copy because the merge will fail if there are any duplicates
        cmd = [
            hdf5path + 'h5copy', '-i',
            os.path.join(path, name), '-o', outputFile, '-s', obj, '-d', obj,
            '-p'
        ]

        Logger.logPrint('Executing:' + ' '.join(cmd))
        Logger.logPrint(check_output(cmd))

        postProcessList = []

        # split worms into separate files to avoid IO issues
        with h5py.File(storeFile, 'r') as f:
            strains = f['worms'].keys()
            for strain in strains:
                worms = f['worms'][strain].keys()
                for worm in worms:
                    # split each worm into a separate hdf5 store
                    wormFileName = '{1}_{2}_{0}'.format(name, strain, worm)
                    wormFile = os.path.join(path, wormFileName)
                    # first copy video info over
                    cmd = [
                        hdf5path + 'h5copy', '-i', storeFile, '-o', wormFile,
                        '-s', obj, '-d', obj, '-p'
                    ]
                    Logger.logPrint('Executing:' + ' '.join(cmd))
                    Logger.logPrint(check_output(cmd))

                    # then copy worm data over
                    wormObj = '/worms/{0}/{1}'.format(strain, worm)
                    cmd = [
                        hdf5path + 'h5copy', '-i', storeFile, '-o', wormFile,
                        '-s', wormObj, '-d', wormObj, '-p'
                    ]
                    Logger.logPrint('Executing:' + ' '.join(cmd))
                    Logger.logPrint(check_output(cmd))

                    # add worm to list to postprocess
                    postProcessList.append((wormFile, strain, worm))

        # parallel postprocess each region
        pool = multiprocessing.Pool()
        result = pool.map_async(postProcessRegion, postProcessList)
        Logger.logPrint(','.join([str(r) for r in result.get()]))
        pool.close()
        pool.join()
        Logger.logPrint('Finished analyzing all regions')

        # merge files back together
        for (wormFile, strain, worm) in postProcessList:
            try:
                args = [
                    hdf5path + 'h5copy', '-i', wormFile, '-o', outputFile,
                    '-s', '/worms/{0}/{1}', '-d', '/worms/{0}/{1}', '-p'
                ]
                # substitute the strain and worm into the command template
                cmd = [arg.format(strain, worm) for arg in args]

                Logger.logPrint('Executing:' + ' '.join(cmd))
                Logger.logPrint('Output:' + check_output(cmd, stderr=STDOUT))

                # remove premerge file
                os.remove(wormFile)
            except Exception as e:
                Logger.logPrint('Error cleaning up:')
                Logger.logPrint('Exception:' + str(e))

        # remove original file
        os.remove(storeFile)

        # rename merge file
        os.rename(outputFile, storeFile)
    except Exception as e:
        Logger.logPrint('Error cleaning up:')
        Logger.logPrint('Exception:' + str(e))
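
parallelPostProcessRegions splits each worm into its own HDF5 file before the pool runs (the original comment notes this avoids IO issues with a single shared store) and merges the results back with h5copy afterwards. One way to sanity-check the merged store, not part of the original module and using a hypothetical file name:

import h5py

with h5py.File('results.h5', 'r') as f:
    # print every group/dataset path, e.g. video/... and worms/N2/worm1/...
    f.visit(print)
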
Code Example #7
def postProcess(self):
    Logger.logPrint('Identifying bad frames...')
    self.identifyBadFrames()
    Logger.logPrint('Extracting postural data...')
    self.extractPosturalData()
    Logger.logPrint('Fixing order of postural data...')
    self.fixPosturalOrdering()
    Logger.logPrint('Segmenting trajectory...')
    self.segment()
    Logger.logPrint('Assigning head...')
    if self.usePosturalHeadAssignment:
        self.assignHeadTail()
    else:
        self.assignHeadTailCentroidOnly()
    Logger.logPrint('Ordering postural data head to tail...')
    self.orderHeadTail()
    Logger.logPrint('Calculating centroid motion variables...')
    self.calculateCentroidMeasurements()
    Logger.logPrint('Calculating postural measurements...')
    self.calculatePosturalMeasurements()
Code Example #8
def main(argv):
    # parse input arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--input',
                        help="the input YAML configuration file or h5 store file")
    parser.add_argument('-s', '--serial',
                        help="run in serial mode",
                        action="store_true")
    args = parser.parse_args()

    batchStart = time.time()

    Logger.logPrint("Starting batchprocess: "+args.input)
    Logger.logPrint("Start time:"+timeStr(batchStart))

    fileName, fileExtension = os.path.splitext(args.input)
    # If given a store file (.h5), just use that
    if fileExtension == '.h5':
        storeFiles = [args.input]
    else:
        with open(args.input, 'r') as f:
            storeFiles = wtc.extractStoreFileList(f)

    # postprocessing
    if args.serial:
        for storeFile in storeFiles:
            with h5py.File(storeFile, 'r+') as f:
                strains = f['worms'].keys()
                for strain in strains:
                    worms = f['worms'][strain].keys()
                    for worm in worms:
                        pp = wtpp.WormTrajectoryPostProcessor(f, strain, worm)
                        pp.postProcess()
                        pp.store()
    else:
        for storeFile in storeFiles:
            Logger.logPrint('Post-processing: ' + storeFile)
            wtp.parallelPostProcessRegions(storeFile)

    batchStop = time.time()
    Logger.logPrint("Start time:"+timeStr(batchStart))
    Logger.logPrint("End time  :"+timeStr(batchStop))
    Logger.logPrint("Total time:"+timeStr(batchStop-batchStart))

    return 'Success'