Exemplo n.º 1
0
    # NOTE(review): tail of a function whose def lies outside this excerpt.
    # Average the per-user ROC curves for the train and test matrices.
    fprTrain, tprTrain = MCEvaluator.averageRocCurve(trainX, U, V)
    fprTest, tprTest = MCEvaluator.averageRocCurve(testX, U, V)
        
    return fprTrain, tprTrain, fprTest, tprTest

# Build one (trainX, testX, learner, U, V) job per (loss, rho) grid point and
# fold, then evaluate all jobs in parallel with computeTestAuc.
if saveResults: 
    paramList = []
    chunkSize = 1  # one job per imap chunk
    
    # Shared initial factors so every grid point starts from the same U, V.
    U, V = maxLocalAuc.initUV(X)
    
    for loss, rho in losses: 
        for trainX, testX in trainTestXs: 
            maxLocalAuc.loss = loss 
            maxLocalAuc.rho = rho 
            # Copies keep the parallel jobs from mutating shared state.
            paramList.append((trainX, testX, maxLocalAuc.copy(), U.copy(), V.copy()))

    pool = multiprocessing.Pool(maxtasksperchild=100, processes=multiprocessing.cpu_count())
    resultsIterator = pool.imap(computeTestAuc, paramList, chunkSize)
    
    # Serial fallback for debugging, kept from the original author:
    #import itertools 
    #resultsIterator = itertools.imap(computeTestAuc, paramList)
    
    # Per-loss accumulators for the mean ROC curves.
    meanFprTrains = []
    meanTprTrains = []
    meanFprTests = []
    meanTprTests = []
    
    # NOTE(review): loop body truncated in this excerpt — the remainder
    # presumably consumes resultsIterator in submission order; confirm upstream.
    for loss in losses: 
        fprTrains = [] 
        tprTrains = [] 
Exemplo n.º 2
0
 # Grid-search the learning-rate parameters (alpha, t0) and the averaging
 # start point over each train/test fold, accumulating train/test objectives
 # into trainObjectives/testObjectives (indexed [startAverage, (alpha, t0)]).
 for trainX, testX in trainTestXs: 
     trainOmegaPtr = SparseUtils.getOmegaListPtr(trainX)
     testOmegaPtr = SparseUtils.getOmegaListPtr(testX)
     # NOTE(review): these omega pointers are unused in this excerpt; kept in
     # case getOmegaListPtr has caching side effects — confirm and prune.
     allOmegaPtr = SparseUtils.getOmegaListPtr(X)
     logging.debug("Number of non-zero elements: " + str((trainX.nnz, testX.nnz)))        
     
     paramList = []      
     
     # One learner copy per (startAverage, (alpha, t0)) grid point, so the
     # parallel workers cannot mutate the shared maxLocalAuc.
     for j, startAverage in enumerate(startAverages): 
         for i, (alpha, t0) in enumerate(learningRateParams):
             maxLocalAuc.startAverage = startAverage
             maxLocalAuc.alpha = alpha 
             maxLocalAuc.t0 = t0
             logging.debug(maxLocalAuc)
             
             learner = maxLocalAuc.copy()
             paramList.append((trainX, learner))
             
     pool = multiprocessing.Pool(maxtasksperchild=100, processes=multiprocessing.cpu_count())
     resultsIterator = pool.imap(computeTestObj, paramList, chunkSize)
 
     # Collect results in the same grid order in which jobs were submitted.
     for j, startAverage in enumerate(startAverages): 
         for i, (alpha, t0) in enumerate(learningRateParams):
             # Bug fix: iterator.next() is Python-2-only; the builtin next()
             # is equivalent and works on Python 2.6+ and Python 3.
             U, V, trainObj, testObj = next(resultsIterator)
             
             trainObjectives[j, i] += trainObj
             testObjectives[j, i] += testObj
     
     pool.terminate()        
     
 # Average the accumulated objectives over the folds.
 trainObjectives /= folds 
Exemplo n.º 3
0
        
    # NOTE(review): tail of a function whose def lies outside this excerpt.
    return fprTrain, tprTrain, fprTest, tprTest

# Build one job per (loss, nnz) grid point and fold; each job also carries a
# model-selection subsample of the training users, then all jobs are
# evaluated in parallel with computeTestAuc.
if saveResults: 
    paramList = []
    chunkSize = 1  # one job per imap chunk
    
    # Shared initial factors so every grid point starts from the same U, V.
    U, V = maxLocalAuc.initUV(X)
    
    for loss in losses: 
        for nnz in nnzs: 
            for trainX, testX in trainTestXs: 
                # Fixed seed so the user subsample is reproducible per grid point.
                numpy.random.seed(21)
                modelSelectX, userInds = Sampling.sampleUsers2(trainX, nnz*trainX.nnz)
                maxLocalAuc.loss = loss 
                # Copies keep the parallel jobs from mutating shared state.
                paramList.append((modelSelectX, trainX, testX, maxLocalAuc.copy(), U.copy(), V.copy()))

    pool = multiprocessing.Pool(maxtasksperchild=100, processes=multiprocessing.cpu_count())
    resultsIterator = pool.imap(computeTestAuc, paramList, chunkSize)
    
    # Serial fallback for debugging, kept from the original author:
    #import itertools 
    #resultsIterator = itertools.imap(computeTestAuc, paramList)
    
    # Per-loss accumulators for the mean ROC curves.
    meanFprTrains = []
    meanTprTrains = []
    meanFprTests = []
    meanTprTests = []
    
    # NOTE(review): loop body truncated in this excerpt — the remainder
    # presumably consumes resultsIterator in submission order; confirm upstream.
    for loss in losses: 
        fprTrains = [] 
        tprTrains = [] 
Exemplo n.º 4
0
 def testCopy(self):
     """Smoke test: copying a configured MaxLocalAUC learner must not raise."""
     # Same constructor arguments as before: rank k=10, quantile u=0.1,
     # learning rate alpha=5.0, tolerance eps=0.001.
     rank = 10
     quantile = 0.1
     tolerance = 0.001
     learner = MaxLocalAUC(rank, quantile, alpha=5.0, eps=tolerance)
     learner.copy()
Exemplo n.º 5
0
 def testCopy(self): 
     """Ensure MaxLocalAUC.copy() runs without error on a fresh learner."""
     # Construct directly with the literal values (k=10, u=0.1, eps=0.001)
     # and immediately exercise copy(); no assertions — absence of an
     # exception is the pass condition.
     maxLocalAuc = MaxLocalAUC(10, 0.1, alpha=5.0, eps=0.001)
     maxLocalAuc.copy()