def getSVMTrainingData(datas):
	"""Convert raw multi-channel recordings into (featureVector, output) pairs for SVM training."""
	trainingDatas = []
	for rawData, output in datas:
		# rawData is sample-major; transpose it into one sequence per channel
		channels = zip(*rawData)
		allBins = []
		for channel in channels:
			# resample the raw channel down to the analysis sample rate
			downSampledData = fftDataExtraction.downSample(channel, rawSps, constants.samplesPerSecond, interpolate = True)

			# windowed FFT -> magnitude spectrum, with polynomial-fit subtraction (degree 2)
			transforms = fftDataExtraction.applyTransformsToWindows(fftDataExtraction.getFFTWindows(downSampledData), magnitude = True, polyFitSubtraction = 2)

			# collapse each spectrum into coarse frequency bins
			bins = fftDataExtraction.DoFrequencyBinning(transforms)
			allBins.append(bins)

		# concatenate the channels' bins window-by-window (x[0] + x[1] assumes exactly two channels)
		trainingInputs = map(lambda x: x[0] + x[1], zip(*allBins))

		trainingDatas += [(trainingInput, output) for trainingInput in trainingInputs]

	return trainingDatas
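# Usage sketch (illustrative only; the sample values and names below are made up and
# not part of this module). Each element of `datas` is assumed to be a (rawData, output)
# pair where rawData is sample-major, one reading per channel per sample; in practice
# rawData needs enough samples for at least one FFT window:
#
#	exampleDatas = [([(12.1, 9.8), (12.4, 9.6), (11.9, 10.1)], 1)]
#	trainingPairs = getSVMTrainingData(exampleDatas)
#
# Each returned pair is (concatenated per-channel frequency bins, output).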
		fallDelayScores.append((score, fallDelay))
		
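	# keep the candidate fall delay with the lowest score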
	fallDelay = min(fallDelayScores)[1]
	
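	# rebuild the square wave with the chosen rise/fall delays (used by the optional plot below)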
	sqWave = squareWave(dataLength * 2, riseDelay, fallDelay, outputTimes)
	
	#pylab.plot(outputTimes, sqWave, '-o') ;pylab.plot(outputTimes, predictions, '-o') ;pylab.plot([0.0, 0.0], [1.5, -0.5]) ;pylab.show()
	
	return riseDelay, fallDelay
	
if __name__ == "__main__":
	constants.samplesPerSecond = int(constants.samplesPerSecond)
	
	#timeData, output = createChangingTimeDomainData(constants.baseFilename, low = 0, high = 10)
	timeData, output = createChangingTimeDomainDataPhaseMatch(constants.baseFilename, low = constants.lowPercent, high = constants.highPercent)
	
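	# split the time-domain signal into FFT windows, keeping each window's target output and timestamp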
	dataWindows, outputs, outputTimes = getFFTWindows(timeData, output)
	
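	# magnitude spectrum per window, then coarse frequency bins: these are the SVM feature vectors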
	transforms = fftDataExtraction.applyTransformsToWindows(dataWindows, magnitude = True)
	transforms = fftDataExtraction.DoFrequencyBinning(transforms)
	
	#svmAccuracy.printSvmValidationAccuracy(transforms, outputs)
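	# per-window SVM predictions on the binned features (averaged inside svmAccuracy)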
	predictions = svmAccuracy.getAverageSVMPredictions(transforms, outputs)
	
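	# estimate how far the predictions lag the true output at rising and falling edges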
	riseLat, fallLat = measureLatency(predictions, outputs, outputTimes)
	print 'rising latency: %dms' % riseLat
	print 'falling latency: %dms' % fallLat
	
	svmAccuracy.graphSvmLatency(predictions, outputs, timeData, outputTimes)