def initialize_parameters(default_model='dummy_file_to_never_be_used.txt'):

    # Set variables from environment variables
    import os
    dl_backend = os.getenv('CANDLE_DL_BACKEND')  # set in input file (and checked and exported in preprocess.py)
    # default_model = os.getenv('CANDLE_DEFAULT_MODEL_FILE')  # set in submit-job/command_script.sh
    # Note: reading CANDLE_DEFAULT_MODEL_FILE here was only needed for interactive mode, where
    # default_model is NOT automatically set in model_runner.py (per Justin's ~5/13/21 fix); passing
    # --config_file to the script argument in interactive mode should now cover that case, so the
    # line above is commented out again (5/17/21).
    desc = os.getenv('CANDLE_MODEL_DESCRIPTION')  # set in run_workflows.sh
    prog_name = os.getenv('CANDLE_PROG_NAME')  # set in run_workflows.sh

    # Added 5/15/21: once candle is imported (right after this block), if a DL backend has not yet
    # been imported, $CANDLE/Benchmarks/common/candle/__init__.py dies with "No backend has been specified."
    # This doesn't matter for canonically CANDLE-compliant scripts, which are assumed to import
    # keras or pytorch at the top of the .py file.
    # Non-CANDLE-compliant scripts, however, call this script (candle_compliant_wrapper.py) instead
    # and are not assumed to import either library at the top; they more naturally do so below in
    # the run() function, since the entire model should be self-contained and only afterward
    # (if at all) wrapped in the CANDLE-compliant functions initialize_parameters() and run().
    # Note that this issue only occurs when running interactively; I have not looked into why it
    # does not occur in batch mode.
    if dl_backend == 'keras':
        import tensorflow.keras
    elif dl_backend == 'pytorch':
        import torch

    # Build benchmark object
    import candle  # note "candle" is put in the path by the lmod module file
    myBmk = candle.Benchmark(os.path.dirname(os.path.realpath(__file__)),
                             default_model,
                             dl_backend,
                             prog=prog_name,
                             desc=desc)

    # Initialize parameters
    gParameters = candle.finalize_parameters(myBmk)

    return gParameters
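
For context, here is a minimal sketch of how this wrapper's initialize_parameters() is typically consumed. The run() function and main() below are illustrative assumptions, not part of the snippet above, and they presume the CANDLE environment variables referenced above are already exported.

def run(gParameters):
    # Placeholder for the wrapped model code; a real wrapper would build and train
    # the model here using the finalized hyperparameters.
    print('Finalized {} hyperparameters'.format(len(gParameters)))

def main():
    gParameters = initialize_parameters()
    run(gParameters)

if __name__ == '__main__':
    main()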
Example No. 2
def initialize_parameters(default_model='uno_defaultUQ_model.txt'):

    # Build benchmark object
    unoUQBmk = UQUno(uno.file_path, default_model, 'keras',
                     prog='uno_trainUQ',
                     desc='Build neural network based models to predict tumor response to single and paired drugs, including UQ analysis.')

    # Initialize parameters
    gParameters = candle.finalize_parameters(unoUQBmk)
    #benchmark.logger.info('Params: {}'.format(gParameters))

    return gParameters
Example No. 3
def initialize_parameters(default_model='tc1_default_model.txt'):

    # Build benchmark object
    tc1Bmk = bmk.BenchmarkTC1(file_path, default_model, 'keras',
                              prog='tc1_baseline',
                              desc='Multi-task (DNN) for data extraction from clinical reports - Pilot 3 Benchmark 1')

    # Initialize parameters
    gParameters = candle.finalize_parameters(tc1Bmk)
    #benchmark.logger.info('Params: {}'.format(gParameters))

    return gParameters
Example No. 4
def initialize_parameters(default_model='attn_abs_default_model.txt'):

    # Build benchmark object
    attnAbsBmk = BenchmarkAttnAbs(attn.file_path, default_model, 'keras',
                                  prog='attention_abstention',
                                  desc='Attention model with abstention - Pilot 1 Benchmark')

    # Initialize parameters
    gParameters = candle.finalize_parameters(attnAbsBmk)
    #attn.logger.info('Params: {}'.format(gParameters))

    return gParameters
Example No. 5
def initialize_parameters(default_model='nt3_default_model.txt'):

    # Build benchmark object
    nt3Bmk = bmk.BenchmarkNT3(bmk.file_path, default_model, 'keras',
                              prog='nt3_baseline',
                              desc='1D CNN to classify RNA sequence data in normal or tumor classes')

    # Initialize parameters
    gParameters = candle.finalize_parameters(nt3Bmk)
    #benchmark.logger.info('Params: {}'.format(gParameters))

    return gParameters
Example No. 6
def initialize_parameters():
    unet_common = unet.UNET(unet.file_path,
        'unet_params.txt',
        'keras',
        prog='unet_example',
        desc='UNET example'
    )

    # Initialize parameters
    gParameters = candle.finalize_parameters(unet_common)
    return gParameters
Example No. 7
def initialize_parameters():
    mnist_common = mnist.MNIST(mnist.file_path,
                               'mnist_params.txt',
                               'keras',
                               prog='mnist_cnn',
                               desc='MNIST CNN example')

    # Initialize parameters
    gParameters = candle.finalize_parameters(mnist_common)
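    # Note: the format call below interpolates the entire gParameters dict into the log path;
    # a specific output-directory entry from gParameters is presumably what is intended here.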
    csv_logger = CSVLogger('{}/params.log'.format(gParameters))

    return gParameters
Example No. 8
def initialize_parameters():

    # Build benchmark object
    comboBmk = combo.BenchmarkCombo(combo.file_path, 'combo_default_model.txt', 'keras',
        prog='combo_baseline',
        desc='Build neural network based models to predict tumor response to drug pairs.')

    # Initialize parameters
    gParameters = candle.finalize_parameters(comboBmk)
    #combo.logger.info('Params: {}'.format(gParameters))

    return gParameters
Example No. 9
def initialize_parameters():
    """ Initialize the parameters for the Advanced example """

    uno_example = bmk.AdvancedExample(
        bmk.file_path,
        'default_model.txt',
        'pytorch',
        prog='advanced_example',
        desc='Differentiable Architecture Search - Advanced example',
    )

    # Initialize parameters
    gParameters = candle.finalize_parameters(uno_example)
    return gParameters
Example No. 10
def initialize_parameters(default_model='p1b2_default_model.txt'):

    # Build benchmark object
    p1b2Bmk = p1b2.BenchmarkP1B2(p1b2.file_path,
                                 default_model,
                                 'keras',
                                 prog='p1b2_baseline',
                                 desc='Train Classifier - Pilot 1 Benchmark 2')

    # Initialize parameters
    gParameters = candle.finalize_parameters(p1b2Bmk)
    #p1b2.logger.info('Params: {}'.format(gParameters))

    return gParameters
Example No. 11
def initialize_parameters(default_model='uno_defaultUQ_model.txt'):

    # Build benchmark object
    unoBmk = uno.BenchmarkUno(uno.file_path, default_model, 'keras',
                              prog='uno_inferUQ',
                              desc='Read models to predict tumor response to single and paired drugs.')

    unoBmk.additional_definitions += additional_definitions_local
    unoBmk.required = unoBmk.required.union(required_local)

    # Initialize parameters
    gParameters = candle.finalize_parameters(unoBmk)
    #benchmark.logger.info('Params: {}'.format(gParameters))

    return gParameters
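
The additional_definitions_local and required_local objects above are defined elsewhere in the inference script. As a rough, hypothetical sketch of their usual shape in the CANDLE benchmarks (the keyword name below is a placeholder, not the real one):

additional_definitions_local = [
    # Each entry describes one extra command-line/config keyword for the benchmark.
    {'name': 'infer_data_file',   # hypothetical keyword name
     'type': str,
     'default': None,
     'help': 'file with the data to run inference on'},
]
required_local = {'infer_data_file'}   # keywords that must be present after parsing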
Example No. 12
def initialize_parameters():
    """ Initialize the parameters for the P3B5 benchmark """

    p3b5_bench = bmk.BenchmarkP3B5(
        bmk.file_path,
        'p3b5_default_model.txt',
        'pytorch',
        prog='p3b5_baseline',
        desc='Differentiable Architecture Search - Pilot 3 Benchmark 5',
    )

    # Initialize parameters
    gParameters = candle.finalize_parameters(p3b5_bench)
    #bmk.logger.info('Params: {}'.format(gParameters))
    return gParameters
Example No. 13
def initialize_parameters(default_model='calibration_default.txt'):

    # Build benchmark object
    calBmk = CalibrationApp(
        file_path,
        default_model,
        'python',
        prog='calibration_main',
        desc='script to compute empirical calibration for UQ regression')

    # config_file, rng_seed and save_path come from the standard CANDLE keywords
    # Finalize parameters
    gParameters = candle.finalize_parameters(calBmk)

    return gParameters
Example No. 14
def initialize_parameters(default_model="combo_default_model.txt"):

    # Build benchmark object
    comboBmk = combo.BenchmarkCombo(
        combo.file_path,
        default_model,
        "keras",
        prog="combo_baseline",
        desc="Build neural network based models to predict tumor response to drug pairs.",
    )

    # Initialize parameters
    gParameters = candle.finalize_parameters(comboBmk)
    combo.logger.info("Params: {}".format(gParameters))

    return gParameters
Example No. 15
def initialize_parameters(default_model='uno_clr_model.txt'):

    # Build benchmark object
    unoBmk = benchmark.BenchmarkUno(
        benchmark.file_path,
        default_model,
        'keras',
        prog='uno_clr',
        desc='Build neural network based models to predict tumor response to single and paired drugs.'
    )

    # Initialize parameters
    gParameters = candle.finalize_parameters(unoBmk)
    # benchmark.logger.info('Params: {}'.format(gParameters))

    return gParameters
Example No. 16
def initialize_parameters(default_model='uno_defaultUQ_model.txt'):

    # Build benchmark object
    unoBmk = uno.BenchmarkUno(
        uno.file_path,
        default_model,
        'keras',
        prog='uno_holdoutUQ_data',
        desc='Build data split for UQ analysis in the problem of prediction of tumor response to drug pairs.'
    )

    # Initialize parameters
    gParameters = candle.finalize_parameters(unoBmk)
    #benchmark.logger.info('Params: {}'.format(gParameters))

    return gParameters
Example No. 17
def initialize_parameters(default_model="attn_default_model.txt"):

    # Build benchmark object
    attnBmk = attn.BenchmarkAttn(
        attn.file_path,
        default_model,
        "keras",
        prog="attn_baseline",
        desc="Multi-task (DNN) for data extraction from clinical reports - Pilot 3 Benchmark 1",
    )

    # Initialize parameters
    gParameters = candle.finalize_parameters(attnBmk)
    # attn.logger.info('Params: {}'.format(gParameters))

    return gParameters
Example No. 18
def initialize_parameters(default_model='p3b4_default_model.txt'):

    # Build benchmark object
    p3b3Bmk = bmk.BenchmarkP3B3(
        bmk.file_path,
        default_model,
        'keras',
        prog='p3b4_baseline',
        desc='Hierarchical Convolutional Attention Networks for data extraction from clinical reports - Pilot 3 Benchmark 4'
    )

    # Initialize parameters
    gParameters = candle.finalize_parameters(p3b3Bmk)
    #bmk.logger.info('Params: {}'.format(gParameters))

    return gParameters
Example No. 19
def initialize_parameters(default_model='unoMT_default_model.txt'):

    # Build benchmark object
    unoMTb = unoMT.unoMTBk(
        unoMT.file_path,
        default_model,
        'pytorch',
        prog='unoMT_baseline',
        desc='Multi-task combined single and combo drug prediction for cross-study data - Pilot 1'
    )

    print("Created unoMT benchmark")

    # Initialize parameters
    gParameters = candle.finalize_parameters(unoMTb)
    print("Parameters initialized")

    return gParameters
Example No. 20
def initialize_parameters(default_model='p2b1_default_model.txt'):

    # Build benchmark object
    p2b1Bmk = p2b1.BenchmarkP2B1(
        p2b1.file_path,
        default_model,
        'keras',
        prog='p2b1_baseline',
        desc='Train Molecular Frame Autoencoder - Pilot 2 Benchmark 1')

    # Initialize parameters
    GP = candle.finalize_parameters(p2b1Bmk)
    # p2b1.logger.info('Params: {}'.format(gParameters))

    print('\nTraining parameters:')
    for key in sorted(GP):
        print("\t%s: %s" % (key, GP[key]))

    # print json.dumps(GP, indent=4, skipkeys=True, sort_keys=True)

    if GP['backend'] != 'theano' and GP['backend'] != 'tensorflow':
        sys.exit('Invalid backend selected: %s' % GP['backend'])

    os.environ['KERAS_BACKEND'] = GP['backend']
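    # Note: reload is not a builtin in Python 3; the full script is assumed to import it
    # (e.g. from importlib import reload), along with os, sys and the Keras backend module K.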
    reload(K)
    '''
    if GP['backend'] == 'theano':
        K.set_image_dim_ordering('th')
    elif GP['backend'] == 'tensorflow':
        K.set_image_dim_ordering('tf')
    '''
    K.set_image_data_format('channels_last')
    # "th" format means that the convolutional kernels will have the shape (depth, input_depth, rows, cols)

    # "tf" format means that the convolutional kernels will have the shape (rows, cols, input_depth, depth)
    print("Image data format: ", K.image_data_format())
    #    print "Image ordering: ", K.image_dim_ordering()
    return GP
Example No. 21
def initialize_parameters():

    # Import needed environment variables
    import os
    candle_dir = os.getenv("CANDLE")
    default_model = os.getenv("DEFAULT_PARAMS_FILE")
    dl_backend = os.getenv("DL_BACKEND")  # should be either keras or pytorch

    # This block is needed so that "import candle" works
    import sys
    sys.path.append(candle_dir + '/Benchmarks/common')
    if dl_backend == 'keras':
        import keras
    elif dl_backend == 'pytorch':
        import torch
    else:
        print('ERROR: Backend {} is not supported (DL_BACKEND must be exported to "keras" or "pytorch" in the submission script)'.format(dl_backend))
        sys.exit(1)
    print('Loaded {} backend'.format(dl_backend))

    # Instantiate the Benchmark class (the values of the prog and desc parameters don't really matter)
    import candle
    mymodel_common = candle.Benchmark(os.path.dirname(os.path.realpath(__file__)),
                                      default_model,
                                      dl_backend,
                                      prog='myprogram',
                                      desc='My CANDLE example')

    # Read the parameters (in a dictionary format) pointed to by the environment variable DEFAULT_PARAMS_FILE
    hyperparams = candle.finalize_parameters(mymodel_common)

    # Return this dictionary of parameters
    return hyperparams
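
A minimal usage sketch for this environment-variable-driven variant; the paths and file names below are placeholders to substitute with a real CANDLE checkout, default-parameters file and backend:

import os

# Placeholder values only; point these at an actual CANDLE checkout and parameters file.
os.environ['CANDLE'] = '/path/to/CANDLE'
os.environ['DEFAULT_PARAMS_FILE'] = 'my_default_model.txt'
os.environ['DL_BACKEND'] = 'keras'

hyperparams = initialize_parameters()
print(hyperparams)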