Example #1
def run(parameter_set_name, filenames, grp_pattern, systemsize):
    print 'submitting ...', parameter_set_name
    print dicttoinfo.dicttoinfo(getattr(parameterSetsO2, parameter_set_name))
    print 'for files', filenames

    dirs = set()
    for fn in filenames:
        with h5py.File(fn, 'r') as f:
            d = myutils.walkh5(f, grp_pattern)
            assert len(d), 'pattern "%s" not found in "%s"!' % (
                grp_pattern, fn)
            dirs = set.union(dirs, d)
    print 'and resolved groups therein: %s' % ','.join(dirs)

    o2params = getattr(parameterSetsO2, parameter_set_name)
    if callable(o2params):
        # a callable parameter set generates one dict per input file
        o2paramsList = o2params(len(filenames))
    else:
        # a plain dict is tagged with its name and reused for every file
        o2params['name'] = parameter_set_name
        o2paramsList = itertools.repeat(o2params)

    for (o2params, fn) in zip(o2paramsList, filenames):
        o2params, num_threads = prepareParametersWithNumThreads(
            copy.deepcopy(o2params), systemsize)
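        # qsub.func bundles the worker callable with its arguments; qsub.submit then
        # schedules that call with the requested CPUs, walltime and memory.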
        qsub.submit(
            qsub.func(worker_on_client, fn, grp_pattern, o2params),
            name='job_o2_' + parameter_set_name + '_' + basename(fn),
            num_cpus=num_threads,
            days=5,  # about one day per thread; the number of threads is proportional to system size
            mem='%iMB' % (2000 * num_threads),
            change_cwd=True)
Example #2
def DoIt(inputFileNames, pattern, options):
    for this_enlargement_factor in options.enlarge_factor:
        inFiles = [h5files.open(fn, 'r') for fn in inputFileNames]

        inGroups = list(
            itertools.chain.from_iterable(
                myutils.walkh5(f, pattern, return_h5objects=True)
                for f in inFiles))
        if len(inGroups) <= 0:
            print 'no matching groups in hdf file(s)'
            sys.exit(0)
        for in_file in inputFileNames:
            bloodflowparams = krebsutils.pyDictFromParamGroup(
                h5files.open(in_file, 'r')['parameters/calcflow'])
            #enlarge_vessels(float(this_enlargement_factor),in_file, bloodflowparams)

            qsub.submit(
                qsub.func(enlarge_vessels, float(this_enlargement_factor),
                          in_file, bloodflowparams),
                name='job_modify_enlarge_' + str(this_enlargement_factor) +
                '_',
                num_cpus=6,
                days=5,  # about one day per thread; the number of threads is proportional to system size
                mem='%iMB' % (1000),
                change_cwd=True)
Example #3
def rerun(fn_of_previous_run, job_name, mem, days):
  #name, c = krebsjobs.PrepareConfigurationForSubmission(vessel_fn, name, 'fakeTum', paramSet)
  #configstr = dicttoinfo(c)
  #config_file_name = '%s.info' % c['fn_out']
  #with open(config_file_name, 'w') as f:
  #  f.write(configstr)
  qsub.submit(qsub.func(krebs.tumors.rerun_faketum, fn_of_previous_run),
                            name = 'job_'+ job_name,
                            mem = mem,
                            days = days,
                            #num_cpus = c['num_threads'],
                            change_cwd = True)
Example #4
def run(vessel_fn, name, paramSet, mem, days):
  name, c = krebsjobs.PrepareConfigurationForSubmission(vessel_fn, name, 'fakeTum', paramSet)
  configstr = dicttoinfo(c)
  config_file_name = '%s.info' % c['fn_out']
  with open(config_file_name, 'w') as f:
    f.write(configstr)
  qsub.submit(qsub.func(krebs.tumors.run_faketum, config_file_name),
                            name = 'job_'+name,
                            mem = mem,
                            days = days,
                            num_cpus = c['num_threads'],
                            change_cwd = True)
Example #5
def run_with_vessels(vessel_fn, name, config_, mem, days):
    name, config_ = PrepareConfigurationForSubmission(vessel_fn, name,
                                                      'tumBulk', config_)
    config_ = krebs.tumors.set_lattice_size(config_, vessel_fn)

    sx, sy, sz = config_['lattice_size']
    print('cont size: %i, %i, %i' % (sx, sy, sz))
    #c = PrepConfig_new_python_dict(c)
    configstr = dicttoinfo(config_)
    qsub.submit(qsub.func(krebs.tumors.run_bulktissue_w_vessels, configstr),
                name='job_' + name,
                mem=mem,
                days=days,
                num_cpus=config_['num_threads'],
                change_cwd=True)
Example #6
def run_simple(name, config):
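    # hard-coded toggle: the else branch below only dumps the config to JSON instead of submitting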
    if 1:
        days = 10. if config['ift'] else 0.5
        qsub.submit(
            qsub.func(runs_on_client, name, config),
            name='job_iff' + name,
            num_cpus=config['num_threads'],
            days=days,
            change_cwd=True,
        )
    else:
        fn = splitext(config['fn_out'])[0]
        import json
        with open(fn + '.json', 'w') as f:
            f.write(json.dumps(config, indent=4))
Example #7
def run2(parameter_set, filenames, grp_pattern):
  print 'submitting ...', parameter_set['name']
 

  num_threads = 1
  if 'num_threads' in parameter_set:
    num_threads = parameter_set['num_threads']
    
  for fn in filenames:
    qsub.submit(qsub.func(worker_on_client, fn, grp_pattern, parameter_set, num_threads),
                  name = 'job_adaption_'+parameter_set['name']+'_'+basename(fn),
                  num_cpus = num_threads,
                  days = 4.,
                  mem = '3500MB',
                  change_cwd = True)
Example #8
def run(vessel_fn, name, paramSet, mem, days):
  
  name, paramSet = PrepareConfigurationForSubmission(vessel_fn, name, 'fakeTumMTS', paramSet)
  configstr = dicttoinfo(paramSet)
  config_file_name = '%s.info' % paramSet['fn_out']
  with open(config_file_name, 'w') as f:
    f.write(configstr)
    
  #o2params = getattr(parameterSetsO2, "breastv3")
  qsub.submit(qsub.func(krebs.tumors.run_faketum_mts, config_file_name),
                            name = 'job_'+name,
                            mem = mem,
                            days = days,
                            num_cpus = paramSet['num_threads'],
                            change_cwd = True)
Example #9
def run_config_samples(configfactory, num_samples, client_worker_function):
    vdcopy = configfactory(0)
    vdcopy.roots = []
    print vdcopy.generate_info_string()
    for sample_num in num_samples:
        vd = configfactory(sample_num)
        vd.outfilename += '-sample%02i' % sample_num
        vd.num_threads = num_threads
        print 'submitting %s, estimated runtime %f h, %i iters' % (
            vd.outfilename, runtime_sec(vd) / 60 / 60, vd.num_iter)
        fn = vd.outfilename + '.info'
        s = vd.generate_info_string()
        with open(fn, 'w') as f:
            f.write(s)
        qsub.submit(qsub.func(client_worker_function, fn, os.getcwd(),
                              vd.outfilename + '.h5'),
                    name='job_' + basename(vd.outfilename),
                    days=(runtime_sec(vd) / 60 / 60) / 24.,
                    mem="2000MB",
                    num_cpus=num_threads)
Example #10
    parser.add_argument('-v', '--movie', default=False, action='store_true')

    goodArguments, otherArguments = parser.parse_known_args()
    qsub.parse_args(otherArguments)
    dbg_fn = goodArguments.debug_filename.name
    options = getattr(krebs.povrayRenderSettings, 'dbg_vessels')
    f = h5py.File(dbg_fn, 'r')
    if not goodArguments.noPov:
        jobs = []
        a = jobs.append

        #for fn in filenames:
        with h5files.open(dbg_fn, 'r') as f:
            paths = myutils.walkh5(f, '*/vessels')
            for path in paths:
                j = RenderJob(f, path, '', options)
                a(j)

        for job in jobs:
            t, m = job.runtime_and_mem
            print('submit %s, %i mb, %f h' % (job.imageFilename, m, t))
            qsub.submit(qsub.func(clientfunc, job, os.getcwd()),
                        name='job_render_' + basename(job.imageFilename),
                        num_cpus=job.params['num_threads'],
                        mem=('%iMB' % m),
                        days=t / 24.)

    if goodArguments.movie:
        time.sleep(30)  # wait 30 seconds for the queuing system to finish
        create_movie(goodArguments)
Example #11
sys.path.append(join(dirname(__file__), '.'))
import stat

import qsub


def ClientWorker(*args, **kwargs):
    print("hello, i'm running on the cluster. my args are %s and %s" %
          (str(args), str(kwargs)))
    print("my cwd is: %s" % os.getcwd())


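# Submission side: only the interactive process reaches this block; on the cluster
# node, qsub.is_client is True and the submitted ClientWorker call runs instead.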
if not qsub.is_client and __name__ == '__main__':
    argv = qsub.parse_args(sys.argv)

    qsub.submit(qsub.func(ClientWorker, 'Test Argument', a_kw_arg=9001),
                name='qsub-script-python-test',
                num_cpus=1,
                days=0,
                hours=0.1,
                mem='100MB',
                change_cwd=True)

    test_program_name = 'client-test-script.sh'
    with open(test_program_name, 'w') as f:
        f.write('''#! /bin/sh\n''')
        f.write('''echo "hello, i'm running on the cluster"\n''')
        f.write('''echo "my cwd is:"\n''')
        f.write('''pwd\n''')
    os.chmod(test_program_name, stat.S_IXUSR | stat.S_IWUSR | stat.S_IRUSR)
Example #12
        config_['lattice_size'] = Vec((20, 20, 20))
    try:
        if 'lattice_size' not in config_:
            raise AssertionError('No lattice_size found in configuration %s' %
                                 config_)
        if type(config_['lattice_size']) == str:
            if config_['lattice_size'] == 'set me to match the vessel domain':
                raise AssertionError('Find better lattice size')
    except Exception as e:
        print e.message
        sys.exit(-1)
    sx, sy, sz = config_['lattice_size']
    print('cont size: %i, %i, %i' % (sx, sy, sz))
    #c = PrepConfig_new_python_dict(c)
    configstr = dicttoinfo(config_)
    qsub.submit(qsub.func(krebs.tumors.run_bulktissue_no_vessels, configstr),
                name='job_' + name,
                mem=mem,
                days=days,
                num_cpus=config_['num_threads'],
                change_cwd=True)


if not qsub.is_client and __name__ == '__main__':
    #parser = argparse.ArgumentParser(parents=[general_group])
    #subparsers=parser.add_subparsers(dest='action')
    #subparsers.add_parser('Restart',parents=[general_group, second_group])
    #subparsers.add_parser('Start', parents=[general_group])
    import argparse
    parent_parser = argparse.ArgumentParser(
        add_help=False,
Example #13
                                 tumorParameterName)
        if not o2ParameterName in dir(krebsjobs.parameters.parameterSetsO2):
            raise AssertionError('Unknown o2 parameter set %s!' %
                                 o2ParameterName)
        for fn in filenames:
            if not os.path.isfile(fn):
                raise AssertionError('The file %s is not present!' % fn)
    except Exception as e:
        print e.message
        sys.exit(-1)

    #tumor_parameter_sets = [('defaultconfig', tum_only_vessels.defaultconfig)]
    #o2_parameter_set_name = 'breastv2'

    tumorParameterSet = getattr(
        krebsjobs.parameters.parameterSetsBulkTissueTumor, tumorParameterName)
    o2ParameterSet = getattr(krebsjobs.parameters.parameterSetsO2,
                             o2ParameterName)
    for vessel_fn in filenames:
        #for name, tumor_parameters in tumor_parameter_sets:
        job_name, tumorParameterSet = krebsjobs.submitFakeTum.PrepareConfigurationForSubmission(
            vessel_fn, tumorParameterName, 'fakeTumDetailed',
            tumorParameterSet)
        qsub.submit(qsub.func(worker_on_client, vessel_fn, tumorParameterSet,
                              o2ParameterName, num_threads),
                    name=('job_%s_%s' % (tumorParameterName, o2ParameterName)),
                    num_cpus=num_threads,
                    days=4.,
                    mem='2500MB',
                    change_cwd=True)
Example #14
You should have received a copy of the GNU General Public License
along with this program.  If not, see <http://www.gnu.org/licenses/>.
'''

if __name__ == '__main__':
    import os.path, sys
    sys.path.append(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))

import os, sys
from os.path import basename
import qsub


def runs_on_client(filename):
    import krebs.plotIff
    import myutils
    import h5py
    dataman = myutils.DataManager(100, krebs.plotIff.GetDataManDataInstances())
    with h5py.File(filename, 'r+') as iff_file:
        krebs.plotIff.ComputeIfpVsIffCorrelationDataLocal(dataman, iff_file)


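# Submission side: queue one post-processing job per input file given on the command line.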
if not qsub.is_client:
    filenames = sys.argv[1:]
    for i, filename in enumerate(filenames):
        qsub.submit(qsub.func(runs_on_client, filename),
                    name='job_' + basename(filename),
                    num_cpus=1,
                    days=0.1,
                    change_cwd=True)
Example #15
    #create filename due to former standards
    filenames = []
    for fn in goodArguments.vesselFileNames:
        filenames.append(fn.name)

    if goodArguments.analyze:
        filenames = goodArguments.vesselFileNames
        grp_pattern = goodArguments.grp_pattern
        try:
            systemsize = int(goodArguments.systemsize)
        except (TypeError, ValueError):
            print 'no valid --systemsize given, using 2 as default'
            systemsize = 2
        qsub.submit(
            qsub.func(worker_plots_for_paper, filenames, grp_pattern),
            name='job_o2_analysis',
            num_cpus=1,  # single threaded only, but scale memory usage with num_threads
            days=2,
            mem='%iMB' % (1000 * systemsize),
            change_cwd=True)
    if goodArguments.render:
        #filenames   = parseResult.args[:-1]
        grp_pattern = goodArguments.grp_pattern
        try:
            systemsize = int(goodArguments.systemsize)
        except (TypeError, ValueError):
            print 'no valid --systemsize given, using 2 as default'
            systemsize = 2
        for fn in filenames: