Example #1
    if not os.path.exists(resultsFn):

        cleanCRPfolder()
        cleanNCDfolder()
        startDateTime = datetime.now()

        # load weights etc. if this is for a neural net run
        if settingsDict['NN Type'] is not None:
            weightMatrix, biases, featureOffset, featureScaling = get_NN_NCD_params(
                NNtype=settingsDict['NN Type'],
                featureName=settingsDict['Feature Name'],
                learningRate=settingsDict['dA Learning Rate'],
                learningRateBoostFactor=settingsDict[
                    'dA Learning Rate Boost Factor'],
                corruptionLevel=settingsDict['dA Corruption Level'],
                numVisible=int(settingsDict['dA Num Visible Units']),
                numHidden=int(settingsDict['dA Num Hidden Units']),
                batchSize=int(settingsDict['dA Batch Size']),
                freqStd=bool(settingsDict['NN Frequency Standardisation']),
                NNnumFolders=int(settingsDict['NN Num Folders']),
                NNnumFilesPerFolder=int(
                    settingsDict['NN Num Files per Folder']),
                NNtimeStacking=int(settingsDict['NN Time Stacking']))
        else:
            weightMatrix = biases = featureOffset = featureScaling = None

        # Create NCD files
        for key in settingsDict.keys():
            print key, ':', settingsDict[key]
        createNCDfiles(
            existingNCDs=None,
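
The excerpt above breaks off inside the createNCDfiles call. For orientation only, below is a purely hypothetical sketch of the kind of settingsDict the branch above expects: the keys are taken from the excerpt, while every value is an assumed placeholder (the int()/bool() casts in the excerpt suggest the real values may arrive as strings from a settings file).

# Hypothetical settingsDict (assumption, not from the source); only the keys
# come from the excerpt above, all values are made-up placeholders.
settingsDict = {
    'NN Type': 'dA',                      # None would skip the NN branch entirely
    'Feature Name': 'chroma',
    'dA Learning Rate': 0.05,
    'dA Learning Rate Boost Factor': 1.0,
    'dA Corruption Level': 0.1,
    'dA Num Visible Units': 12,
    'dA Num Hidden Units': 12,
    'dA Batch Size': 40,
    'NN Frequency Standardisation': True,
    'NN Num Folders': 5,
    'NN Num Files per Folder': 10,
    'NN Time Stacking': 1,
}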

Example #2

# converting to FENS

# Get the folders (performances)
piecesPath = FFP.getRootPath(featureName)
# the contains parameter avoids the new powerspectrum folder
piecesFolders = getFolderNames(piecesPath,
                               contains = 'mazurka',
                               orderAlphabetically = True)

if numFolders is not None:
    piecesFolders = piecesFolders[: numFolders]

# Load weights and biases       
if NNtype is not None:
    weightMatrix, biases, featureOffset, featureScaling = get_NN_NCD_params(
                                                            NNtype, featureName, learningRate, learningRateBoostFactor,
                                                            corruptionLevel, numOriginalFeatures, numNewFeatures, batchSize, 
                                                            freqStd = frequencyStandardisation, NNnumFolders = numFolders, 
                                                            NNnumFilesPerFolder = numFilesPerFolder,
                                                            NNtimeStacking = timeStacking)
# Load (and optionally transform) the feature files
p = 0  # running count of loaded feature files (for the progress print below)
featuresDataFrames = []
for piecesFolder in piecesFolders:
    performancesPath = FFP.getFeatureFolderPath(piecesPath + piecesFolder + '/', featureName)
    performances = getFileNames(performancesPath, 
                                orderAlphabetically = True, 
                                endsWith = '.csv')
    if numFilesPerFolder is not None:
        performances = performances[: numFilesPerFolder]
    for performance in performances:
        p += 1
        print '\rloading feature file %i...' % p,
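
The getFolderNames and getFileNames helpers are not shown in this excerpt. As a rough guide only, the self-contained sketch below mirrors the keyword arguments visible in the calls above; it is an assumption, not the project's implementation.

# Hypothetical stand-ins (assumptions, not the project's code) for the
# getFolderNames / getFileNames helpers used above.
import os

def getFolderNames(path, contains=None, orderAlphabetically=False):
    # List sub-folder names, optionally filtered by substring.
    names = [n for n in os.listdir(path)
             if os.path.isdir(os.path.join(path, n))]
    if contains is not None:
        names = [n for n in names if contains in n]
    return sorted(names) if orderAlphabetically else names

def getFileNames(path, orderAlphabetically=False, endsWith=None):
    # List file names, optionally filtered by suffix (e.g. '.csv').
    names = [n for n in os.listdir(path)
             if os.path.isfile(os.path.join(path, n))]
    if endsWith is not None:
        names = [n for n in names if n.endswith(endsWith)]
    return sorted(names) if orderAlphabetically else names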

Example #3
while currentDateTime < stopRunningAt:

    nextSettings = True  # sentinel so the inner loop's "is not None" test passes on the first pass
    iteration += 1

    while nextSettings is not None and currentDateTime < stopRunningAt:
        nextSettings = opt.getNextSettings()
        if nextSettings is not None:
            for setting in nextSettings:
                # load weights etc. if this is for a neural net run
                if NNtype is not None:
                    weightMatrix, biases, featureOffset, featureScaling = get_NN_NCD_params(
                        NNtype, featureName, learningRate,
                        learningRateBoostFactor,
                        setting['dA Corruption Level'],
                        setting['dA Num Visible Units'],
                        setting['dA Num Hidden Units'], batchSize, freqStd,
                        numFolders, numFilesPerFolder, timeStacking)
                else:
                    weightMatrix = biases = featureOffset = featureScaling = None

                # Calculate NCDs
                for key in setting.keys():
                    print key, ':', setting[key]
                NCDlist = calculateNCDs(processPool, featureName, numFeatures,
                                        setting['DownSample Factor'],
                                        setting['Time Delay'],
                                        setting['Dimension'], CRPmethod,
                                        setting['Neighbourhood Size'],
                                        numFolders, numFilesPerFolder,
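
Example #3 breaks off inside the calculateNCDs call. The surrounding control flow, repeatedly asking an optimiser for the next batch of parameter settings until it returns None or a wall-clock deadline passes, is illustrated by the self-contained sketch below; SettingsProvider is a made-up stub, not the project's optimiser, and all values are assumptions.

# Hypothetical sketch of the settings-iteration pattern above; the stub class
# and all values are made up for illustration.
from datetime import datetime, timedelta

class SettingsProvider(object):

    def __init__(self, batches):
        self.batches = list(batches)

    def getNextSettings(self):
        # Return the next batch of settings, or None when exhausted.
        return self.batches.pop(0) if self.batches else None

provider = SettingsProvider([[{'Dimension': 2, 'Time Delay': 1}],
                             [{'Dimension': 3, 'Time Delay': 2}]])
stopRunningAt = datetime.now() + timedelta(hours=1)

nextSettings = True  # start truthy so the loop runs at least once
while nextSettings is not None and datetime.now() < stopRunningAt:
    nextSettings = provider.getNextSettings()
    if nextSettings is not None:
        for setting in nextSettings:
            print setting  # each setting would drive one calculateNCDs run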