import models  # project-local module providing the recurrent network constructors used below

def build_model(input_var, ExptDict):
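    """Build the recurrent network selected by ExptDict["model"]["model_id"].

    Returns the output layer (l_out) and the recurrent layer (l_rec).
    """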
    # Unpack necessary variables
    model      = ExptDict["model"]["model_id"]
    n_loc      = ExptDict["task"]["n_loc"]
    n_out      = ExptDict["task"]["n_out"]
    batch_size = ExptDict["batch_size"]
    n_in       = ExptDict["n_in"]
    n_hid      = ExptDict["n_hid"]
    out_nonlin = ExptDict["task"]["out_nonlin"]
    
    if model == 'LeInitRecurrent':  
        diag_val     = ExptDict["model"]["diag_val"]
        offdiag_val  = ExptDict["model"]["offdiag_val"]
        l_out, l_rec = models.LeInitRecurrent(input_var, batch_size=batch_size, 
                                              n_in=(n_loc+1)*n_in, n_out=n_out, 
                                              n_hid=n_hid, diag_val=diag_val,
                                              offdiag_val=offdiag_val,
                                              out_nlin=out_nonlin)
    elif model == 'OrthoInitRecurrent':  
        init_val = ExptDict["model"]["init_val"]
        l_out, l_rec = models.OrthoInitRecurrent(input_var, batch_size=batch_size, 
                                              n_in=(n_loc+1)*n_in, n_out=n_out, 
                                              n_hid=n_hid, init_val=init_val, 
                                              out_nlin=out_nonlin)
    elif model == 'ResidualRecurrent':
        leak_inp = ExptDict["model"]["leak_inp"]
        leak_hid = ExptDict["model"]["leak_hid"]
        l_out, l_rec = models.ResidualRecurrent(input_var, batch_size=batch_size, 
                                                n_in=(n_loc+1)*n_in, n_out=n_out, 
                                                n_hid=n_hid, leak_inp=leak_inp, 
                                                leak_hid=leak_hid, 
                                                out_nlin=out_nonlin)
    elif model == 'GRURecurrent':
        diag_val     = ExptDict["model"]["diag_val"]
        offdiag_val  = ExptDict["model"]["offdiag_val"]
        l_out, l_rec = models.GRURecurrent(input_var, batch_size=batch_size, 
                                           n_in=(n_loc+1)*n_in, n_out=n_out, n_hid=n_hid, 
                                           diag_val=diag_val, offdiag_val=offdiag_val,
                                           out_nlin=out_nonlin)    
    return l_out, l_rec
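For reference, a minimal usage sketch of build_model follows. It assumes Theano and Lasagne are available; the concrete values (batch_size, n_in, n_hid, n_loc, diag_val, offdiag_val) and the tensor3 input variable are illustrative assumptions, not taken from the source.

# Usage sketch with illustrative values (assumes Theano/Lasagne and the
# project-local `models` module are importable).
import theano.tensor as T
import lasagne

input_var = T.tensor3('input')                 # (batch, time, features)
ExptDict = {
    "model": {"model_id": "LeInitRecurrent",
              "diag_val": 0.9, "offdiag_val": 0.0},          # assumed values
    "task": {"n_loc": 8, "n_out": 1,
             "out_nonlin": lasagne.nonlinearities.sigmoid},
    "batch_size": 50,
    "n_in": 50,
    "n_hid": 500,
}
l_out, l_rec = build_model(input_var, ExptDict)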
Example #2
def build_model(input_var, ExptDict):
    # Unpack necessary variables
    model      = ExptDict["model"]["model_id"]
    n_loc      = ExptDict["task"]["n_loc"]
    n_out      = ExptDict["task"]["n_out"]
    batch_size = ExptDict["batch_size"]
    n_in       = ExptDict["n_in"]
    n_hid      = ExptDict["n_hid"]
    out_nonlin = ExptDict["task"]["out_nonlin"]
    
    if model == 'LeInitRecurrent':  
        diag_val     = ExptDict["model"]["diag_val"]
        offdiag_val  = ExptDict["model"]["offdiag_val"]
        l_out, l_rec = models.LeInitRecurrent(input_var, batch_size=batch_size, 
                                              n_in=n_loc*n_in, n_out=n_out, 
                                              n_hid=n_hid, diag_val=diag_val,
                                              offdiag_val=offdiag_val,
                                              out_nlin=out_nonlin)
    elif model == 'TanhRecurrent':
        wscale       = ExptDict["model"]["wscale"]
        l_out, l_rec = models.TanhRecurrent(input_var, batch_size=batch_size,
                                              n_in=n_loc*n_in, n_out=n_out,
                                              n_hid=n_hid, wscale=wscale,
                                              out_nlin=out_nonlin)
    elif model == 'LeInitRecurrentWithFastWeights':
        diag_val     = ExptDict["model"]["diag_val"]
        offdiag_val  = ExptDict["model"]["offdiag_val"]
        gamma        = ExptDict["model"]["gamma"]
        l_out, l_rec = models.LeInitRecurrentWithFastWeights(input_var, batch_size=batch_size, 
                                              n_in=n_loc*n_in, n_out=n_out, 
                                              n_hid=n_hid, diag_val=diag_val,
                                              offdiag_val=offdiag_val,
                                              out_nlin=out_nonlin, gamma=gamma)
    elif model == 'OrthoInitRecurrent':  
        init_val = ExptDict["model"]["init_val"]
        l_out, l_rec = models.OrthoInitRecurrent(input_var, batch_size=batch_size, 
                                              n_in=n_loc*n_in, n_out=n_out, 
                                              n_hid=n_hid, init_val=init_val, 
                                              out_nlin=out_nonlin)
    elif model == 'ResidualRecurrent':
        leak_inp = ExptDict["model"]["leak_inp"]
        leak_hid = ExptDict["model"]["leak_hid"]
        wscale = ExptDict["model"]["wscale"]
        l_out, l_rec = models.ResidualRecurrent(input_var, batch_size=batch_size, 
                                                n_in=n_loc*n_in, n_out=n_out, 
                                                n_hid=n_hid, wscale=wscale, leak_inp=leak_inp,
                                                leak_hid=leak_hid, 
                                                out_nlin=out_nonlin)
    elif model == 'GRURecurrent':
        diag_val     = ExptDict["model"]["diag_val"]
        offdiag_val  = ExptDict["model"]["offdiag_val"]
        l_out, l_rec = models.GRURecurrent(input_var, batch_size=batch_size, 
                                           n_in=n_loc*n_in, n_out=n_out, n_hid=n_hid, 
                                           diag_val=diag_val, offdiag_val=offdiag_val,
                                           out_nlin=out_nonlin)
    elif model == 'LeInitRecurrentWithLayerNorm':
        diag_val     = ExptDict["model"]["diag_val"]
        offdiag_val  = ExptDict["model"]["offdiag_val"]
        l_out, l_rec = models.LeInitRecurrentWithLayerNorm(input_var, batch_size=batch_size,
                                              n_in=n_loc*n_in, n_out=n_out,
                                              n_hid=n_hid, diag_val=diag_val,
                                              offdiag_val=offdiag_val,
                                              out_nlin=out_nonlin)
    return l_out, l_rec
Example #3
    # Fragment of the enclosing model-selection logic; model, n_hid, diag_val,
    # offdiag_val, mask_var, and generator are defined earlier in the enclosing
    # function, which also relies on imports of lasagne, theano.tensor (as T),
    # and the project-local models module.
    if model == 'LeInitRecurrent':
        l_out, l_rec = models.LeInitRecurrent(
            input_var,
            mask_var=mask_var,
            batch_size=generator.batch_size,
            n_in=generator.n_loc * generator.n_in,
            n_out=generator.n_out,
            n_hid=n_hid,
            diag_val=diag_val,
            offdiag_val=offdiag_val,
            out_nlin=lasagne.nonlinearities.sigmoid)
    elif model == 'ResidualRecurrent':
        l_out, l_rec = models.ResidualRecurrent(
            input_var,
            mask_var=mask_var,
            batch_size=generator.batch_size,
            n_in=generator.n_loc * generator.n_in,
            n_out=generator.n_out,
            n_hid=n_hid,
            leak_inp=1.0,
            leak_hid=1.0)
    elif model == 'GRURecurrent':
        l_out, l_rec = models.GRURecurrent(input_var,
                                           mask_var=mask_var,
                                           batch_size=generator.batch_size,
                                           n_in=generator.n_loc * generator.n_in,
                                           n_out=generator.n_out,
                                           n_hid=n_hid)

    # The generated output variable and the loss function
    pred_var = T.clip(lasagne.layers.get_output(l_out), 1e-6, 1.0 - 1e-6)
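The snippet is cut off after the clipped predictions. As a hedged continuation sketch only: the clipping to (1e-6, 1 - 1e-6) suggests a binary cross-entropy loss, which could be wired up as below; target_var, the Adam learning rate, and the compiled function signature are assumptions, not from the source.

# Hedged continuation sketch (assumed, not from the source): binary cross-entropy
# on the clipped predictions, Adam updates, and a compiled training function.
import theano

target_var = T.tensor3('target')               # assumed target variable/shape
loss = lasagne.objectives.binary_crossentropy(pred_var, target_var).mean()
params = lasagne.layers.get_all_params(l_out, trainable=True)
updates = lasagne.updates.adam(loss, params, learning_rate=1e-3)
train_fn = theano.function([input_var, mask_var, target_var], loss, updates=updates)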