def download_TransformedFeature_histograms(featureName, NNtype, numVisible, numHidden,
                                           numFolders, numFilesPerFolder, batchSize,
                                           learningRate, learningRateBoostFactor,
                                           corruptionLevel, timeStacking,
                                           frequencyStandardisation):
    """Compute a histogram of neural-net-transformed feature values and pickle it.

    Loads the trained network's weights/biases and the feature offset/scaling,
    builds a 100-bin frequency histogram of the transformed features, and writes
    it as a pickle under '<transferPath>/feature histograms/'.

    Parameters mirror the network-training configuration (feature name, NN type,
    layer sizes, batch size, learning-rate schedule, corruption level, time
    stacking, frequency standardisation) plus the corpus layout
    (numFolders, numFilesPerFolder). Returns None; side effect is the file write.
    """
    # Load weights, biases, feature offset and feature scaling
    weights, biases = get_NN_WeightsAndBiases(
        NNtype, featureName, batchSize, learningRate, learningRateBoostFactor,
        corruptionLevel, timeStacking, frequencyStandardisation,
        numVisible, numHidden)
    featureOffset, featureScaling = get_NN_featureOffsetAndScaling(
        featureName, numFolders, numFilesPerFolder, timeStacking, numVisible,
        frequencyStandardisation)

    # Get histogram of transformed features
    hist = transformedFeatureFrequenciesHistogram(
        featureName, weights, biases, featureOffset, featureScaling,
        timeStacking, numFolders, numFilesPerFolder, numBins=100)

    # Save histogram (filename encodes the run's hyper-parameters)
    resultsPath = transferPath + 'feature histograms/'
    createPath(resultsPath)
    resultsFile = (resultsPath + featureName
                   + '_crpt_%i_nin_%i_nhdn_%i_basz_%i_lnrt_%0.2f'
                   % (corruptionLevel, numVisible, numHidden, batchSize, learningRate)
                   + '_transformed_features_frequency_histograms.pkl')
    # Bug fix: the original passed a bare open(...) to pickle.dump and never
    # closed the handle, risking a leaked descriptor or a truncated pickle.
    # The with-block guarantees the file is flushed and closed.
    with open(resultsFile, 'wb') as resultsFileHandle:
        pickle.dump(hist, resultsFileHandle)
def getWeightsAndBiases(self):
    """Return (weights, biases) for this network, loading them on first use.

    The pair is fetched via get_NN_WeightsAndBiases using the instance's
    stored configuration, then cached on self.weights / self.biases so
    subsequent calls are free.
    """
    # Fast path: already loaded.
    if self.weights is not None:
        return self.weights, self.biases
    # Slow path: fetch once and cache on the instance.
    self.weights, self.biases = get_NN_WeightsAndBiases(
        self._NNtype, self._featureName, self._batchSize,
        self._learningRate, self._learningRateBoostFactor,
        self._corruptionLevel, self._timeStacking,
        self._frequencyStandardisation, self._numVisible, self._numHidden)
    return self.weights, self.biases
'CRP Time Delay': settings[12], 'NCD Sequence Length': settings[13]} # Load feature file dict if required if len(featureNames) != 1: featureFileDict, pieceIds = loadFeatureFileDict(featureNames[0], numFolders, numFilesPerFolder) # load weights and biases if this is for a neural net run weightMatrix = None biases = None featureOffset = None featureScaling = None if settingsDict['NN Type'] is not None: weightMatrix, biases = get_NN_WeightsAndBiases(settingsDict['NN Type'], settingsDict['Feature Name'], numFolders, numFilesPerFolder, settingsDict['NN Batch Size'], settingsDict['NN Learning Rate'], settingsDict['NN Learning Rate Boost Factor'], settingsDict['NN Corruption Level'], settingsDict['NN Time Stacking'], frequencyStandardisation, numFeatures, settingsDict['NN # Hidden Units']) featureOffset, featureScaling = get_NN_featureOffsetAndScaling(settingsDict['Feature Name'], numFolders, numFilesPerFolder, settingsDict['NN Time Stacking'], numFeatures, frequencyStandardisation) print 'Generating transformed features...' transformedFeatureFileDict = FFP.generateTransformedFeatureFileDict( featureFileDict, numFeatures, settingsDict['Feature Name'], settingsDict['NN Type'], numFeatures, settingsDict['NN # Hidden Units'], weightMatrix, biases, featureOffset, featureScaling, settingsDict['NN Time Stacking'], processPool = processPool) else: try: featureOffset = FFP.featureOffset[settingsDict['Feature Name']] featureScaling = FFP.featureScaling[settingsDict['Feature Name']]