Example #1
0
    def loadList(dbPath, objectType, whereList):
        """Load every row matching `whereList` from the DB at `dbPath`.

        Builds a SELECT query for `objectType`, runs it, and maps each result
        row onto a fresh object instance, one property per column.

        Args:
            dbPath: path/identifier passed to db.initDb.
            objectType: factory/class of the objects to build; also used to
                generate the SELECT query.
            whereList: WHERE-clause descriptors forwarded to db.getSelectQuery.

        Returns:
            An objectList populated with one object per result row.
        """
        ilist = objectList(objectType)

        query = db.getSelectQuery(objectType, whereList)
        conn = db.initDb(dbPath)
        # Fix: close the connection even if query execution raises
        # (previously a failure in executeReturnList leaked `conn`).
        try:
            dataList = db.executeReturnList(conn, query)
            for row in dataList:
                obj = ilist.objectType()
                # Copy every column of the row onto the matching object property.
                appUtility.processObjectProp(obj, lambda iobj, j: iobj.setValue(j, row[j]))
                ilist.append(obj)
        finally:
            db.closeDb(conn)

        return ilist
Example #2
0
    def check_credentials(self, username, password):
        """Check whether a user with the given username/password exists.

        On success, remembers the username on self and in the Flask-style
        BASIC_AUTH_USERNAME config entry.

        Args:
            username: login name to check.
            password: password to check.
                NOTE(review): compared as stored — presumably hashed upstream;
                confirm plaintext passwords are not persisted.

        Returns:
            True if the credentials match a stored user, False otherwise.
        """
        obj = user()

        obj.username = username
        obj.password = password

        query = db.getExistQuery(obj, ["username", "password"])
        conn = db.initDb(self.dbPath)
        # Fix: guarantee the connection is closed even if the query raises.
        try:
            doesExist = db.executeIfExist(conn, query)
        finally:
            db.closeDb(conn)

        if doesExist:
            # Remember the authenticated user for this session.
            self.username = obj.username
            self.app.config['BASIC_AUTH_USERNAME'] = username
            return True
        return False
Example #3
0
    def checkIfExist(dbPath, query, errorStatus, statusWhenExist):
        """Validate an existence constraint against the DB, aborting on failure.

        Args:
            dbPath: path/identifier passed to db.initDb.
            query: EXISTS-style query to execute.
            errorStatus: status code passed to abort() when the check fails.
            statusWhenExist: 0 means the row must NOT exist; any other value
                means the row MUST exist.

        Returns:
            True if the check passed; False if it failed and abort() returned
            (abort normally raises, so False is rarely observed).
        """
        conn = db.initDb(dbPath)
        # Fix: guarantee the connection is closed even if the query raises.
        try:
            doesExist = db.executeIfExist(conn, query)
        finally:
            db.closeDb(conn)

        # Collapse the original two-branch logic into one comparison:
        # fail when the observed existence differs from the expected one.
        expectExist = statusWhenExist != 0
        if doesExist != expectExist:
            abort(errorStatus)
            return False

        return True
Example #4
0
    logger.info( "X_train shape: " + strNone( dataInfo[ const.KEY_TRN_X_SHAPE ] ) )
    logger.info( "Y_train shape: " + strNone( dataInfo[ const.KEY_TRN_Y_SHAPE ] ) )
    logger.info( "X_dev  shape: "  + strNone( dataInfo[ const.KEY_DEV_X_SHAPE ] ) )
    logger.info( "Y_dev  shape: "  + strNone( dataInfo[ const.KEY_DEV_Y_SHAPE ] ) )

#     if ( hyperParams[ const.KEY_USE_WEIGHTS ] ) :
#         print ( "  Weights_train shape :", WEIGHT_train.shape )
#     print ()

    return datasetTrn, datasetDev, dataInfo


if __name__ == '__main__':

    # Open the application DB; the context manager closes it on exit or error.
    with db.initDb( const.APP_KEY, const.DB_DIR ) as conn :

        # Debug hook (disabled): exercises the DB layer.
        #db.test( conn )

        # Read the main selection configuration from the DB.
        selection = db.getSelection( conn, const.Selections.KEY_MAIN )

        # Refresh dataset rows for the selection.
        updateDatasets( conn, selection )

        # Refresh machine definitions; also yields datasource and form configs.
        ( iniMachines, configDatasources, configMachinesForms ) = updateMachines( conn )

        # Load all datasets for further processing.
        # NOTE(review): this chunk ends here — the rest of the script is not visible.
        datasets = db.getDatasets( conn )
Example #5
0
def train( tune = True) :
    """Train the network described by `structure`, optionally tuning hyper-params.

    Args:
        tune: when True, run nbTuning rounds, each with beta and keep_prob
            sampled log-uniformly from fixed bounds, tracking the best DEV
            accuracy; when False, run a single round with the fixed values.

    Side effects: loads the dataset from disk, prompts on stdin for a run
    comment, creates/updates run rows in the DB, and prints progress.
    """

    # hyper parameters
    hyperParams = {}

    # use tensorboard
    isUseTensorboard = False

    # system info
    systemInfo = getSystemInfo( tf.__version__ )

    ## Units of layers
    structure                                    = [ 100, 48, 1 ]
    hyperParams[ const.KEY_MINIBATCH_SIZE ]      = 64
    hyperParams[ const.KEY_NUM_EPOCHS ]          = 2500
    hyperParams[ const.KEY_USE_WEIGHTS ]         = False
    hyperParams[ const.KEY_START_LEARNING_RATE ] = 0.0001
    hyperParams[ const.KEY_BETA ]                = 0
    hyperParams[ const.KEY_KEEP_PROB ]           = 1

    if tune :
        # Tuning bounds: both beta and keep_prob are sampled log-uniformly.
        beta_min = 0.000000000000001
        beta_max = 0.5

        keep_prob_min = 0.5
        keep_prob_max = 1

        nbTuning = 20
        tuning = {}

        # Fix: use -inf instead of a magic sentinel, and pre-bind
        # maxHyperParams so it can never be referenced before assignment.
        maxAccuracyDev = float( "-inf" )
        maxHyperParams = None
        maxIdRun = -1
    else :
        nbTuning = 1

    # Loading the dataset
    X_train_orig, Y_train_orig, PATH_train, TAG_train, WEIGHT_train, \
    X_dev_orig  , Y_dev_orig, PATH_dev, TAG_dev = \
        load_dataset( hyperParams[ const.KEY_USE_WEIGHTS ] )

    # Flatten the images (one column per example) and normalize to [0, 1].
    X_train = X_train_orig.reshape( X_train_orig.shape[0], -1 ).T / 255.
    X_dev   = X_dev_orig.reshape( X_dev_orig.shape[0], -1 ).T / 255.

    Y_train = Y_train_orig
    Y_dev = Y_dev_orig

    print( "Structure:", structure )
    print()
    print ("number of training examples = " + str(X_train.shape[1]))
    print ("number of test examples = " + str(X_dev.shape[1]))
    print ("X_train shape: " + str(X_train.shape))
    print ("Y_train shape: " + str(Y_train.shape))
    print ("X_test shape: " + str(X_dev.shape))
    print ("Y_test shape: " + str(Y_dev.shape))
    print ()
    print ("Start Learning rate :", str( hyperParams[ const.KEY_START_LEARNING_RATE ] ) )
    print ("Num epoch           :", str( hyperParams[ const.KEY_NUM_EPOCHS ] ) )
    print ("Minibatch size      :", str( hyperParams[ const.KEY_MINIBATCH_SIZE ] ) )
    print ("Beta                :", str( hyperParams[ const.KEY_BETA ] ) )
    print ("keep_prob           :", str( hyperParams[ const.KEY_KEEP_PROB ] ) )
    print ("isLoadWeights       :", hyperParams[ const.KEY_USE_WEIGHTS ] )
    if ( hyperParams[ const.KEY_USE_WEIGHTS ] ) :
        print ( "  Weights_train shape :", WEIGHT_train.shape )

    dataInfo = {
        const.KEY_TRN_SIZE    : str( X_train.shape[1] ),
        const.KEY_DEV_SIZE    : str( X_dev.shape[1] ),
        const.KEY_TRN_SHAPE   : str( X_train.shape ),
        const.KEY_DEV_SHAPE   : str( X_dev.shape ),
        # Bug fix: the TRN_Y_* entries previously reported Y_dev's size/shape.
        const.KEY_TRN_Y_SIZE  : str( Y_train.shape[1] ),
        const.KEY_TRN_Y_SHAPE : str( Y_train.shape ),
        const.KEY_DEV_Y_SIZE  : str( Y_dev.shape[1] ),
        const.KEY_DEV_Y_SHAPE : str( Y_dev.shape ),
    }

    print()
    comment = input( "Run comment: " )

    # Start time
    tsGlobalStart = time.time()

    # Init DB; the context manager guarantees the connection is closed.
    with db.initDb( APP_KEY, DB_DIR ) as conn:

        for j in range( 1, nbTuning + 1 ) :

            if tune:
                print( "*****************************" )
                print( "Tune round", str( j ), "/", str( nbTuning ) )
                print( "*****************************" )

                # Sample beta log-uniformly in [beta_min, beta_max].
                logBeta = random.uniform( math.log10( beta_min ), math.log10( beta_max ) )
                beta = math.pow( 10, logBeta )
                print( "Beta = " + str( beta ))

                # Sample keep_prob log-uniformly in [keep_prob_min, keep_prob_max].
                logKeep_prob = random.uniform( math.log10( keep_prob_min ), math.log10( keep_prob_max ) )
                keep_prob = math.pow( 10, logKeep_prob )
                print( "keep_prob = " + str( keep_prob ))

                # update hyper params
                hyperParams[ const.KEY_BETA         ] = beta
                hyperParams[ const.KEY_KEEP_PROB    ] = keep_prob

            # Create run
            idRun = db.createRun( conn )

            # Update run before calling model
            db.updateRunBefore(
                conn, idRun,
                structure=structure, comment=comment,
                system_info=systemInfo, hyper_params=hyperParams, data_info=dataInfo
            )

            # Run model and update DB run with extra info
            _, accuracyDev, accuracyTrain = model(
                conn, idRun, structure,
                X_train, Y_train, PATH_train, TAG_train, WEIGHT_train,
                X_dev, Y_dev, PATH_dev, TAG_dev,
                X_train_orig, X_dev_orig,
                hyperParams,
                isTensorboard = isUseTensorboard,
                show_plot = not tune, extractImageErrors = not tune
            )

            # Print run
            run = db.getRun( conn, idRun )
            print( "Run stored in DB:", str( run ) )

            if tune :
                # Store results
                tuning[ j ] = {
                    "beta": beta, "keep_prob": keep_prob,
                    "accuracyDev": accuracyDev, "accuracyTrain": accuracyTrain
                }

                # Track the best round so far by DEV accuracy.
                if ( accuracyDev > maxAccuracyDev ) :
                    maxAccuracyDev = accuracyDev
                    maxHyperParams = tuning[ j ]
                    maxIdRun = idRun

                # print max
                print( "Max DEV accuracy:", maxAccuracyDev )
                print( "Max hyper params:" )
                print( maxHyperParams )

        if tune :
            # Print tuning
            print( "Tuning:" , tuning )
            print()
            print( "Max DEV accuracy      :", maxAccuracyDev )
            print( "Max hyper params idRun:", maxIdRun )

    # End time and total elapsed duration.
    tsGlobalEnd = time.time()
    globalElapsedSeconds = int( round( tsGlobalEnd - tsGlobalStart ) )

    print( "Finished in", globalElapsedSeconds, "seconds" )