Example #1
import sys
import time
import multiprocessing

sys.path.append('src')  # assumed: the project modules below live under 'src'
import dataloader
import evaluator
import resulthandler
# 'logger' and the 'para' configuration dict are set up earlier in the original script.

startTime = time.clock() # start timing
logger.info('==============================================')
logger.info('Approach: [UMEAN, IMEAN, UPCC, IPCC, UIPCC].')

# load the dataset
dataTensor = dataloader.load(para)
logger.info('Loading data done.')

# run for each density
numTimeSlice = dataTensor.shape[2]
if para['parallelMode']:  # run on multiple processes
    pool = multiprocessing.Pool()
    for cxtId in xrange(numTimeSlice):
        dataMatrix = dataTensor[:, :, cxtId]  # 2-D matrix for this time slice
        for density in para['density']:
            pool.apply_async(evaluator.execute, (dataMatrix, density, para, cxtId))
    pool.close()
    pool.join()
else:  # run in a single process
    for cxtId in xrange(numTimeSlice):
        dataMatrix = dataTensor[:, :, cxtId]
        for density in para['density']:
            evaluator.execute(dataMatrix, density, para, cxtId)

# result handling
resulthandler.averageStats(para, numTimeSlice)

logger.info(time.strftime('All done. Total running time: %d-th day - %Hhour - %Mmin - %Ssec.',
         time.gmtime(time.clock() - startTime)))
logger.info('==============================================')
sys.path.remove('src')
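
Each script reads its settings from a 'para' dict defined earlier in the file. A minimal sketch of the keys used above, with illustrative values rather than the repository's actual defaults:

para = {
    'parallelMode': True,           # True: dispatch evaluations onto a multiprocessing.Pool
    'density': [0.05, 0.10, 0.15],  # matrix densities to evaluate (illustrative values)
}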
Example #2
startTime = time.clock() # start timing
logger.info('==============================================')
logger.info('PMF: Probabilistic Matrix Factorization.')

# load the dataset
dataTensor = dataloader.load(para)
logger.info('Loading data done.')

# run for each density
if para['parallelMode']:  # run on multiple processes
    pool = multiprocessing.Pool()
    for cxtId in range(dataTensor.shape[2]):
        dataMatrix = dataTensor[:, :, cxtId]  # 2-D matrix for this time slice
        for density in para['density']:
            pool.apply_async(evaluator.execute, (dataMatrix, density, para, cxtId))
    pool.close()
    pool.join()
else:  # run in a single process
    for cxtId in range(dataTensor.shape[2]):
        dataMatrix = dataTensor[:, :, cxtId]
        for density in para['density']:
            evaluator.execute(dataMatrix, density, para, cxtId)

# result handling
resulthandler.averageStats(para)

logger.info(time.strftime('All done. Total running time: %d-th day - %Hhour - %Mmin - %Ssec.',
         time.gmtime(time.clock() - startTime)))
logger.info('==============================================')
sys.path.remove('src')
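
One caveat with the parallel branch above: pool.apply_async discards worker exceptions unless the returned AsyncResult objects are checked, and on platforms that spawn rather than fork (e.g. Windows) the pool must be created under a __main__ guard. A hedged sketch of such a variant, under the same assumptions about dataTensor, para, and evaluator as in the original script:

if __name__ == '__main__':  # required where the multiprocessing start method is 'spawn'
    pool = multiprocessing.Pool()
    asyncResults = []
    for cxtId in range(dataTensor.shape[2]):
        dataMatrix = dataTensor[:, :, cxtId]
        for density in para['density']:
            asyncResults.append(
                pool.apply_async(evaluator.execute, (dataMatrix, density, para, cxtId)))
    pool.close()
    pool.join()
    for res in asyncResults:
        res.get()  # re-raises any exception that occurred in a worker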
Example #3
startTime = time.clock()  # start timing

# load the dataset
dataTensor = dataloader.load(para)
logger.info('Loading data done.')

# run for each density
numTimeSlice = dataTensor.shape[2]
if para['parallelMode']:  # run on multiple processes
    pool = multiprocessing.Pool()
    for cxtId in xrange(numTimeSlice):
        dataMatrix = dataTensor[:, :, cxtId]
        for density in para['density']:
            pool.apply_async(evaluator.execute,
                             (dataMatrix, density, para, cxtId))
    pool.close()
    pool.join()
else:  # run in a single process
    for cxtId in xrange(numTimeSlice):
        dataMatrix = dataTensor[:, :, cxtId]
        for density in para['density']:
            evaluator.execute(dataMatrix, density, para, cxtId)

# result handling
resulthandler.averageStats(para, numTimeSlice)

logger.info(
    time.strftime(
        'All done. Total running time: %d-th day - %Hhour - %Mmin - %Ssec.',
        time.gmtime(time.clock() - startTime)))
logger.info('==============================================')
sys.path.remove('src')
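
The dataTensor[:, :, cxtId] indexing used throughout assumes a 3-D array laid out as (users, services, time slices). A small self-contained numpy sketch of that access pattern, with purely illustrative shapes:

import numpy as np

numUser, numService, numTimeSlice = 142, 4500, 64  # illustrative shapes
dataTensor = np.zeros((numUser, numService, numTimeSlice))
for cxtId in range(numTimeSlice):
    dataMatrix = dataTensor[:, :, cxtId]  # 2-D slice of shape (numUser, numService)
    assert dataMatrix.shape == (numUser, numService)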
Example #4
startTime = time.clock()  # start timing
logger.info('Approach: [UMEAN, IMEAN, UPCC, IPCC, UIPCC].')

# load the dataset
dataTensor = dataloader.load(para)
logger.info('Loading data done.')

# run for each density
if para['parallelMode']:  # run on multiple processes
    pool = multiprocessing.Pool()
    for cxtId in range(dataTensor.shape[2]):
        dataMatrix = dataTensor[:, :, cxtId]
        for density in para['density']:
            pool.apply_async(evaluator.execute,
                             (dataMatrix, density, para, cxtId))
    pool.close()
    pool.join()
else:  # run in a single process
    for cxtId in range(dataTensor.shape[2]):
        dataMatrix = dataTensor[:, :, cxtId]
        for density in para['density']:
            evaluator.execute(dataMatrix, density, para, cxtId)

# result handling
resulthandler.averageStats(para, dataTensor.shape[2])

logger.info(
    time.strftime(
        'All done. Total running time: %d-th day - %Hhour - %Mmin - %Ssec.',
        time.gmtime(time.clock() - startTime)))
logger.info('==============================================')
sys.path.remove('src')
Example #5
startTime = time.clock()  # start timing
logger.info('==============================================')
logger.info('Approach: [UMEAN, IMEAN, UPCC, IPCC, UIPCC].')

# load the dataset
dataTensor = dataloader.load(para)
logger.info('Loading data done.')

# run for each density
endSlice = dataTensor.shape[2]
startSlice = int(endSlice * (1 - para['slicesToTest']))
if para['parallelMode']:  # run on multiple processes
    pool = multiprocessing.Pool()
    for cxtId in xrange(startSlice, endSlice):
        dataMatrix = dataTensor[:, :, cxtId]  # 2-D matrix for this time slice
        for density in para['density']:
            pool.apply_async(evaluator.execute, (dataMatrix, density, para, cxtId))
    pool.close()
    pool.join()
else:  # run in a single process
    for cxtId in xrange(startSlice, endSlice):
        dataMatrix = dataTensor[:, :, cxtId]
        for density in para['density']:
            evaluator.execute(dataMatrix, density, para, cxtId)

# result handling
resulthandler.averageStats(para, endSlice)

logger.info(time.strftime('All done. Total running time: %d-th day - %Hhour - %Mmin - %Ssec.',
         time.gmtime(time.clock() - startTime)))
logger.info('==============================================')
sys.path.remove('src')
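
Unlike the other scripts, Example #5 evaluates only the last slicesToTest fraction of the time slices. For instance, assuming a tensor with 64 time slices and para['slicesToTest'] = 0.1 (an illustrative value):

endSlice = 64                           # dataTensor.shape[2]
startSlice = int(endSlice * (1 - 0.1))  # int(57.6) -> 57
# xrange(57, 64) then covers only the last 7 time slices: 57, 58, ..., 63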