def jobman_entrypoint(state, channel):
    # record mercurial versions of each package
    pylearn.version.record_versions(state,[theano,ift6266,pylearn])
    # TODO: remove this, bad for number of simultaneous requests on DB
    channel.save()

    # For test runs, we don't want to use the whole dataset so
    # reduce it to fewer elements if asked to.
    rtt = None
    if state.has_key('reduce_train_to'):
        rtt = state['reduce_train_to']
    elif REDUCE_TRAIN_TO:
        rtt = REDUCE_TRAIN_TO
        
    if state.has_key('decrease_lr'):
        decrease_lr = state['decrease_lr']
    else:
        decrease_lr = 0
        
    if state.has_key('decrease_lr_pretrain'):
        dec=state['decrease_lr_pretrain']
    else:
        dec=0
 
    n_ins = 32*32

    if state.has_key('subdataset'):
        subdataset_name=state['subdataset']
    else:
        subdataset_name=SUBDATASET_NIST

    #n_outs = 62 # 10 digits, 26*2 (lower, capitals)
    if subdataset_name == "upper":
	n_outs = 26
	subdataset = datasets.nist_upper()
	examples_per_epoch = NIST_UPPER_TRAIN_SIZE
    elif subdataset_name == "lower":
	n_outs = 26
	subdataset = datasets.nist_lower()
	examples_per_epoch = NIST_LOWER_TRAIN_SIZE
    elif subdataset_name == "digits":
	n_outs = 10
	subdataset = datasets.nist_digits()
	examples_per_epoch = NIST_DIGITS_TRAIN_SIZE
    else:
	n_outs = 62
	subdataset = datasets.nist_all()
	examples_per_epoch = NIST_ALL_TRAIN_SIZE
    
    print 'Using subdataset', subdataset_name

    # Make sure these variables exist even if the branches below are not taken
    PATH = ''
    nom_reptrain = ''
    nom_serie = ""
    if state['pretrain_choice'] == 0:
        nom_serie="series_NIST.h5"
    elif state['pretrain_choice'] == 1:
        nom_serie="series_P07.h5"

    series = create_series(state.num_hidden_layers,nom_serie)


    print "Creating optimizer with state, ", state

    optimizer = SdaSgdOptimizer(dataset_name=subdataset_name,
                                dataset=subdataset,
                                hyperparameters=state,
                                n_ins=n_ins, n_outs=n_outs,
                                examples_per_epoch=examples_per_epoch,
                                series=series,
                                max_minibatches=rtt)

    parameters=[]
    #Number of files of P07 used for pretraining
    nb_file=0

    print('\n\tpretraining with NIST\n')

    optimizer.pretrain(subdataset, decrease = dec) 

    channel.save()
    
    #Set some of the parameters used for the finetuning
    if state.has_key('finetune_set'):
        finetune_choice=state['finetune_set']
    else:
        finetune_choice=FINETUNE_SET
    
    if state.has_key('max_finetuning_epochs'):
        max_finetune_epoch_NIST=state['max_finetuning_epochs']
    else:
        max_finetune_epoch_NIST=MAX_FINETUNING_EPOCHS
    
    if state.has_key('max_finetuning_epochs_P07'):
        max_finetune_epoch_P07=state['max_finetuning_epochs_P07']
    else:
        max_finetune_epoch_P07=max_finetune_epoch_NIST
    
    # Decide how the finetuning is done
    
    if finetune_choice == 0:
        print('\n\n\tfinetune with NIST\n\n')
        optimizer.reload_parameters('params_pretrain.txt')
        optimizer.finetune(subdataset,subdataset,max_finetune_epoch_NIST,ind_test=1,decrease=decrease_lr)
        channel.save()
    if finetune_choice == 1:
        print('\n\n\tfinetune with P07\n\n')
        optimizer.reload_parameters('params_pretrain.txt')
        optimizer.finetune(datasets.nist_P07(),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0,decrease=decrease_lr)
        channel.save()
    if finetune_choice == 2:
        print('\n\n\tfinetune with P07 followed by NIST\n\n')
        optimizer.reload_parameters('params_pretrain.txt')
        optimizer.finetune(datasets.nist_P07(),datasets.nist_all(),max_finetune_epoch_P07,ind_test=20,decrease=decrease_lr)
        optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=21,decrease=decrease_lr)
        channel.save()
    if finetune_choice == 3:
        print('\n\n\tfinetune with NIST only on the logistic regression on top (but validation on P07).\n\
        All hidden unit outputs are inputs to the logistic regression\n\n')
        optimizer.reload_parameters('params_pretrain.txt')
        optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,special=1,decrease=decrease_lr)
        
        
    if finetune_choice==-1:
        print('\nSERIES OF 4 DIFFERENT FINETUNINGS')
        print('\n\n\tfinetune with NIST\n\n')
        sys.stdout.flush()
        optimizer.reload_parameters('params_pretrain.txt')
        optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,decrease=decrease_lr)
        channel.save()
        print('\n\n\tfinetune with P07\n\n')
        sys.stdout.flush()
        optimizer.reload_parameters('params_pretrain.txt')
        optimizer.finetune(datasets.nist_P07(),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0,decrease=decrease_lr)
        channel.save()
        print('\n\n\tfinetune with P07 (done earlier) followed by NIST (done here)\n\n')
        sys.stdout.flush()
        optimizer.reload_parameters('params_finetune_P07.txt')
        optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=21,decrease=decrease_lr)
        channel.save()
        print('\n\n\tfinetune with NIST only on the logistic regression on top.\n\
        All hidden unit outputs are inputs to the logistic regression\n\n')
        sys.stdout.flush()
        optimizer.reload_parameters('params_pretrain.txt')
        optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,special=1,decrease=decrease_lr)
        channel.save()
    
    channel.save()

    return channel.COMPLETE
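
# Evaluation variant: for each set of finetuned parameters found on disk
# (NIST, P07, NIST then P07, P07 then NIST, PNIST07, PNIST07 then NIST),
# reload the model and report its training_error over the NIST, P07 and
# PNIST07 sets, capped at 500000 examples when set_choice == 0.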
def jobman_entrypoint(state, channel,set_choice):
    # record mercurial versions of each package
    pylearn.version.record_versions(state,[theano,ift6266,pylearn])
    # TODO: remove this, bad for number of simultaneous requests on DB
    channel.save()

    # For test runs, we don't want to use the whole dataset so
    # reduce it to fewer elements if asked to.
    rtt = None
    if state.has_key('reduce_train_to'):
        rtt = state['reduce_train_to']
    elif REDUCE_TRAIN_TO:
        rtt = REDUCE_TRAIN_TO
 
    n_ins = 32*32
    n_outs = 62 # 10 digits, 26*2 (lower, capitals)
     
    examples_per_epoch = NIST_ALL_TRAIN_SIZE

    PATH = ''
    if set_choice == 0:
        maximum_exemples=int(500000) # Maximum number of examples seen
    else:
        maximum_exemples = int(1000000000)  # effectively no limit



    print "Creating optimizer with state, ", state

    optimizer = SdaSgdOptimizer(dataset=datasets.nist_all(), 
                                    hyperparameters=state, \
                                    n_ins=n_ins, n_outs=n_outs,\
                                    examples_per_epoch=examples_per_epoch, \
                                    max_minibatches=rtt)	


    

    if os.path.exists(PATH+'params_finetune_NIST.txt'):
        print ('\n finetune = NIST ')
        optimizer.reload_parameters(PATH+'params_finetune_NIST.txt')
        print "For" + str(maximum_exemples) + "over the NIST set: "
        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
        print "For" + str(maximum_exemples) + "over the P07 set: "
        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
        
    
    if os.path.exists(PATH+'params_finetune_P07.txt'):
        print ('\n finetune = P07 ')
        optimizer.reload_parameters(PATH+'params_finetune_P07.txt')
        print "For" + str(maximum_exemples) + "over the NIST set: "
        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
        print "For" + str(maximum_exemples) + "over the P07 set: "
        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)

    
    if os.path.exists(PATH+'params_finetune_NIST_then_P07.txt'):
        print ('\n finetune = NIST then P07')
        optimizer.reload_parameters(PATH+'params_finetune_NIST_then_P07.txt')
        print "For" + str(maximum_exemples) + "over the NIST set: "
        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
        print "For" + str(maximum_exemples) + "over the P07 set: "
        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
    
    if os.path.exists(PATH+'params_finetune_P07_then_NIST.txt'):
        print ('\n finetune = P07 then NIST')
        optimizer.reload_parameters(PATH+'params_finetune_P07_then_NIST.txt')
        print "For" + str(maximum_exemples) + "over the NIST set: "
        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
        print "For" + str(maximum_exemples) + "over the P07 set: "
        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
    
    if os.path.exists(PATH+'params_finetune_PNIST07.txt'):
        print ('\n finetune = PNIST07')
        optimizer.reload_parameters(PATH+'params_finetune_PNIST07.txt')
        print "For" + str(maximum_exemples) + "over the NIST set: "
        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
        print "For" + str(maximum_exemples) + "over the P07 set: "
        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
        
    if os.path.exists(PATH+'params_finetune_PNIST07_then_NIST.txt'):
        print ('\n finetune = PNIST07 then NIST')
        optimizer.reload_parameters(PATH+'params_finetune_PNIST07_then_NIST.txt')
        print "For" + str(maximum_exemples) + "over the NIST set: "
        optimizer.training_error(datasets.nist_all(maxsize=maximum_exemples),set_choice)
        print "For" + str(maximum_exemples) + "over the P07 set: "
        optimizer.training_error(datasets.nist_P07(maxsize=maximum_exemples),set_choice)
        print "For" + str(maximum_exemples) + "over the PNIST07 set: "
        optimizer.training_error(datasets.PNIST07(maxsize=maximum_exemples),set_choice)
    
    channel.save()

    return channel.COMPLETE
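
# Quick inspection variant: reload the finetuned parameters stored under
# PATH_P07 and call see_error() on small (100-example) samples of the NIST
# and P07 test sets.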
def jobman_entrypoint(state, channel):
    # record mercurial versions of each package
    pylearn.version.record_versions(state, [theano, ift6266, pylearn])
    # TODO: remove this, bad for number of simultaneous requests on DB
    channel.save()

    # For test runs, we don't want to use the whole dataset so
    # reduce it to fewer elements if asked to.
    rtt = None
    if state.has_key("reduce_train_to"):
        rtt = state["reduce_train_to"]
    elif REDUCE_TRAIN_TO:
        rtt = REDUCE_TRAIN_TO

    n_ins = 32 * 32
    n_outs = 62  # 10 digits + 26*2 (lower, capitals)

    examples_per_epoch = NIST_ALL_TRAIN_SIZE

    PATH = PATH_P07
    maximum_exemples = int(100)  # Maximum number of examples seen

    print "Creating optimizer with state, ", state

    optimizer = SdaSgdOptimizer(
        dataset=datasets.nist_all(),
        hyperparameters=state,
        n_ins=n_ins,
        n_outs=n_outs,
        examples_per_epoch=examples_per_epoch,
        max_minibatches=rtt,
    )

    print "The model is created"
    if os.path.exists(PATH + "params_finetune_NIST.txt"):
        print ("\n finetune = NIST ")
        optimizer.reload_parameters(PATH + "params_finetune_NIST.txt")
        print "For" + str(maximum_exemples) + "over the NIST test set: "
        optimizer.see_error(datasets.nist_all(maxsize=maximum_exemples))

    if os.path.exists(PATH + "params_finetune_P07.txt"):
        print ("\n finetune = P07 ")
        optimizer.reload_parameters(PATH + "params_finetune_P07.txt")
        print "For" + str(maximum_exemples) + "over the P07 test set: "
        optimizer.see_error(datasets.nist_P07(maxsize=maximum_exemples))

    if os.path.exists(PATH + "params_finetune_NIST_then_P07.txt"):
        print ("\n finetune = NIST then P07")
        optimizer.reload_parameters(PATH + "params_finetune_NIST_then_P07.txt")
        print "For" + str(maximum_exemples) + "over the NIST test set: "
        optimizer.see_error(datasets.nist_all(maxsize=maximum_exemples))
        print "For" + str(maximum_exemples) + "over the P07 test set: "
        optimizer.see_error(datasets.nist_P07(maxsize=maximum_exemples))

    if os.path.exists(PATH + "params_finetune_P07_then_NIST.txt"):
        print ("\n finetune = P07 then NIST")
        optimizer.reload_parameters(PATH + "params_finetune_P07_then_NIST.txt")
        print "For" + str(maximum_exemples) + "over the P07 test set: "
        optimizer.see_error(datasets.nist_P07(maxsize=maximum_exemples))
        print "For" + str(maximum_exemples) + "over the NIST test set: "
        optimizer.see_error(datasets.nist_all(maxsize=maximum_exemples))

    channel.save()

    return channel.COMPLETE
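
# Training variant: reuse a pre-training done earlier (NIST or P07, selected
# by state['pretrain_choice']) and run the finetuning schedule selected by
# state['finetune_set'] on NIST and/or P07.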
def jobman_entrypoint(state, channel):
    # record mercurial versions of each package
    pylearn.version.record_versions(state,[theano,ift6266,pylearn])
    # TODO: remove this, bad for number of simultaneous requests on DB
    channel.save()

    # For test runs, we don't want to use the whole dataset so
    # reduce it to fewer elements if asked to.
    rtt = None
    if state.has_key('reduce_train_to'):
        rtt = state['reduce_train_to']
    elif REDUCE_TRAIN_TO:
        rtt = REDUCE_TRAIN_TO
        
    if state.has_key('decrease_lr'):
        decrease_lr = state['decrease_lr']
    else:
        decrease_lr = 0
 
    n_ins = 32*32
    n_outs = 62 # 10 digits, 26*2 (lower, capitals)
     
    examples_per_epoch = NIST_ALL_TRAIN_SIZE
    # Make sure these variables exist even if the branches below are not taken
    PATH = ''
    nom_reptrain = ''
    nom_serie = ""
    if state['pretrain_choice'] == 0:
        PATH=PATH_NIST
        nom_pretrain='NIST'
        nom_serie="series_NIST.h5"
    elif state['pretrain_choice'] == 1:
        PATH=PATH_P07
        nom_pretrain='P07'
        nom_serie="series_P07.h5"

    series = create_series(state.num_hidden_layers,nom_serie)

    print "Creating optimizer with state, ", state

    optimizer = SdaSgdOptimizer(dataset=datasets.nist_all(), 
                                    hyperparameters=state, \
                                    n_ins=n_ins, n_outs=n_outs,\
                                    examples_per_epoch=examples_per_epoch, \
                                    series=series,
                                    max_minibatches=rtt)	

    parameters=[]
    #Number of files of P07 used for pretraining
    nb_file=0
##    if state['pretrain_choice'] == 0:
##        print('\n\tpretraining with NIST\n')
##        optimizer.pretrain(datasets.nist_all()) 
##    elif state['pretrain_choice'] == 1:
##        #To know how many file will be used during pretraining
##        nb_file = state['pretraining_epochs_per_layer'] 
##        state['pretraining_epochs_per_layer'] = 1 #Only 1 time over the dataset
##        if nb_file >=100:
##            sys.exit("The code does not support this many pretraining epochs (99 max with P07).\n"+
##            "You have to correct the code (and be patient, P07 is huge !!)\n"+
##             "or reduce the number of pretraining epochs to run the code (better idea).\n")
##        print('\n\tpretraining with P07')
##        optimizer.pretrain(datasets.nist_P07(min_file=0,max_file=nb_file)) 
    
    print ('Reusing pre-training done earlier ( '+nom_pretrain+' )')
    

        
    sys.stdout.flush()
    channel.save()
    
    #Set some of the parameters used for the finetuning
    if state.has_key('finetune_set'):
        finetune_choice=state['finetune_set']
    else:
        finetune_choice=FINETUNE_SET
    
    if state.has_key('max_finetuning_epochs'):
        max_finetune_epoch_NIST=state['max_finetuning_epochs']
    else:
        max_finetune_epoch_NIST=MAX_FINETUNING_EPOCHS
    
    if state.has_key('max_finetuning_epochs_P07'):
        max_finetune_epoch_P07=state['max_finetuning_epochs_P07']
    else:
        max_finetune_epoch_P07=max_finetune_epoch_NIST
    
    # Decide how the finetuning is done
    
    if finetune_choice == 0:
        print('\n\n\tfinetune with NIST\n\n')
        optimizer.reload_parameters(PATH+'params_pretrain.txt')
        optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,decrease=decrease_lr)
        channel.save()
    if finetune_choice == 1:
        print('\n\n\tfinetune with P07\n\n')
        optimizer.reload_parameters(PATH+'params_pretrain.txt')
        optimizer.finetune(datasets.nist_P07(),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0,decrease=decrease_lr)
        channel.save()
    if finetune_choice == 2:
        print('\n\n\tfinetune with P07 followed by NIST\n\n')
        optimizer.reload_parameters(PATH+'params_pretrain.txt')
        optimizer.finetune(datasets.nist_P07(),datasets.nist_all(),max_finetune_epoch_P07,ind_test=20,decrease=decrease_lr)
        optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=21,decrease=decrease_lr)
        channel.save()
    if finetune_choice == 3:
        print('\n\n\tfinetune with NIST only on the logistic regression on top (but validation on P07).\n\
        All hidden unit outputs are inputs to the logistic regression\n\n')
        optimizer.reload_parameters(PATH+'params_pretrain.txt')
        optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,special=1,decrease=decrease_lr)
        
        
    if finetune_choice==-1:
        print('\nSERIES OF 4 DIFFERENT FINETUNINGS')
        print('\n\n\tfinetune with NIST\n\n')
        sys.stdout.flush()
        optimizer.reload_parameters(PATH+'params_pretrain.txt')
        optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,decrease=decrease_lr)
        channel.save()
        print('\n\n\tfinetune with P07\n\n')
        sys.stdout.flush()
        optimizer.reload_parameters(PATH+'params_pretrain.txt')
        optimizer.finetune(datasets.nist_P07(),datasets.nist_all(),max_finetune_epoch_P07,ind_test=0,decrease=decrease_lr)
        channel.save()
        print('\n\n\tfinetune with P07 (done earlier) followed by NIST (done here)\n\n')
        sys.stdout.flush()
        optimizer.reload_parameters('params_finetune_P07.txt')
        optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=21,decrease=decrease_lr)
        channel.save()
        print('\n\n\tfinetune with NIST only on the logistic regression on top.\n\
        All hidden unit outputs are inputs to the logistic regression\n\n')
        sys.stdout.flush()
        optimizer.reload_parameters(PATH+'params_pretrain.txt')
        optimizer.finetune(datasets.nist_all(),datasets.nist_P07(),max_finetune_epoch_NIST,ind_test=1,special=1,decrease=decrease_lr)
        channel.save()
    
    channel.save()

    return channel.COMPLETE
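
# Evaluation variant: for each set of finetuned parameters found on disk,
# report the error either per NIST class (digits / lower / upper, when
# NIST_BY_CLASS == 1) or on the P07 and PNIST07 validation sets.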
def jobman_entrypoint(state, channel):
    # record mercurial versions of each package
    pylearn.version.record_versions(state,[theano,ift6266,pylearn])
    # TODO: remove this, bad for number of simultaneous requests on DB
    channel.save()

    # For test runs, we don't want to use the whole dataset so
    # reduce it to fewer elements if asked to.
    rtt = None
    if state.has_key('reduce_train_to'):
        rtt = state['reduce_train_to']
    elif REDUCE_TRAIN_TO:
        rtt = REDUCE_TRAIN_TO
 
    n_ins = 32*32
    n_outs = 62 # 10 digits, 26*2 (lower, capitals)
     
    examples_per_epoch = NIST_ALL_TRAIN_SIZE

    PATH = ''
    NIST_BY_CLASS=0



    print "Creating optimizer with state, ", state

    optimizer = SdaSgdOptimizer(dataset=datasets.nist_all(), 
                                    hyperparameters=state, \
                                    n_ins=n_ins, n_outs=n_outs,\
                                    examples_per_epoch=examples_per_epoch, \
                                    max_minibatches=rtt)	


    
    

    if os.path.exists(PATH+'params_finetune_NIST.txt'):
        print ('\n finetune = NIST ')
        optimizer.reload_parameters(PATH+'params_finetune_NIST.txt')
        if NIST_BY_CLASS == 1:
            print "NIST DIGITS"
            optimizer.training_error(datasets.nist_digits(),part=2)
            print "NIST LOWER CASE"
            optimizer.training_error(datasets.nist_lower(),part=2)
            print "NIST UPPER CASE"
            optimizer.training_error(datasets.nist_upper(),part=2)
        else:
            print "P07 valid"
            optimizer.training_error(datasets.nist_P07(),part=1)
            print "PNIST valid"
            optimizer.training_error(datasets.PNIST07(),part=1)
        
    
    if os.path.exists(PATH+'params_finetune_P07.txt'):
        print ('\n finetune = P07 ')
        optimizer.reload_parameters(PATH+'params_finetune_P07.txt')
        if NIST_BY_CLASS == 1:
            print "NIST DIGITS"
            optimizer.training_error(datasets.nist_digits(),part=2)
            print "NIST LOWER CASE"
            optimizer.training_error(datasets.nist_lower(),part=2)
            print "NIST UPPER CASE"
            optimizer.training_error(datasets.nist_upper(),part=2)
        else:
            print "P07 valid"
            optimizer.training_error(datasets.nist_P07(),part=1)
            print "PNIST valid"
            optimizer.training_error(datasets.PNIST07(),part=1)

    
    if os.path.exists(PATH+'params_finetune_NIST_then_P07.txt'):
        print ('\n finetune = NIST then P07')
        optimizer.reload_parameters(PATH+'params_finetune_NIST_then_P07.txt')
        if NIST_BY_CLASS == 1:
            print "NIST DIGITS"
            optimizer.training_error(datasets.nist_digits(),part=2)
            print "NIST LOWER CASE"
            optimizer.training_error(datasets.nist_lower(),part=2)
            print "NIST UPPER CASE"
            optimizer.training_error(datasets.nist_upper(),part=2)
        else:
            print "P07 valid"
            optimizer.training_error(datasets.nist_P07(),part=1)
            print "PNIST valid"
            optimizer.training_error(datasets.PNIST07(),part=1)
    
    if os.path.exists(PATH+'params_finetune_P07_then_NIST.txt'):
        print ('\n finetune = P07 then NIST')
        optimizer.reload_parameters(PATH+'params_finetune_P07_then_NIST.txt')
        if NIST_BY_CLASS == 1:
            print "NIST DIGITS"
            optimizer.training_error(datasets.nist_digits(),part=2)
            print "NIST LOWER CASE"
            optimizer.training_error(datasets.nist_lower(),part=2)
            print "NIST UPPER CASE"
            optimizer.training_error(datasets.nist_upper(),part=2)
        else:
            print "P07 valid"
            optimizer.training_error(datasets.nist_P07(),part=1)
            print "PNIST valid"
            optimizer.training_error(datasets.PNIST07(),part=1)
    
    if os.path.exists(PATH+'params_finetune_PNIST07.txt'):
        print ('\n finetune = PNIST07')
        optimizer.reload_parameters(PATH+'params_finetune_PNIST07.txt')
        if NIST_BY_CLASS == 1:
            print "NIST DIGITS"
            optimizer.training_error(datasets.nist_digits(),part=2)
            print "NIST LOWER CASE"
            optimizer.training_error(datasets.nist_lower(),part=2)
            print "NIST UPPER CASE"
            optimizer.training_error(datasets.nist_upper(),part=2)
        else:
            print "P07 valid"
            optimizer.training_error(datasets.nist_P07(),part=1)
            print "PNIST valid"
            optimizer.training_error(datasets.PNIST07(),part=1)
        
    if os.path.exists(PATH+'params_finetune_PNIST07_then_NIST.txt'):
        print ('\n finetune = PNIST07 then NIST')
        optimizer.reload_parameters(PATH+'params_finetune_PNIST07_then_NIST.txt')
        if NIST_BY_CLASS == 1:
            print "NIST DIGITS"
            optimizer.training_error(datasets.nist_digits(),part=2)
            print "NIST LOWER CASE"
            optimizer.training_error(datasets.nist_lower(),part=2)
            print "NIST UPPER CASE"
            optimizer.training_error(datasets.nist_upper(),part=2)
        else:
            print "P07 valid"
            optimizer.training_error(datasets.nist_P07(),part=1)
            print "PNIST valid"
            optimizer.training_error(datasets.PNIST07(),part=1)
    
    channel.save()

    return channel.COMPLETE
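
# Minimal standalone usage sketch (not part of the original scripts).  It only
# illustrates how one of the entrypoints above is driven; since they all share
# the same name, the last definition wins.  jobman normally supplies the state
# (a DD, i.e. a dict that also allows attribute access) and a channel exposing
# save() and COMPLETE.  DEFAULT_HP_NIST is a hypothetical name for this
# module's default hyperparameter dict; substitute whatever is actually
# defined here.
if __name__ == '__main__':

    class _StubChannel(object):
        # Stands in for jobman's channel; a real channel persists the state.
        COMPLETE = 'COMPLETE'
        def save(self):
            pass

    class _DictState(dict):
        # Mimics jobman's DD: attribute access falls back to dict lookup.
        def __getattr__(self, name):
            try:
                return self[name]
            except KeyError:
                raise AttributeError(name)

    example_state = _DictState(DEFAULT_HP_NIST)  # hypothetical defaults
    example_state['reduce_train_to'] = 1000      # keep a smoke test short
    jobman_entrypoint(example_state, _StubChannel())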