Example #1
import argparse

def main():
    parser = argparse.ArgumentParser()

    parser.add_argument("--iterations", dest="iterations", type=int, default=100)
    parser.add_argument("--data-size", dest="dataSize", type=int, required=True)

    args = parser.parse_args()
    data = generateData(args.dataSize)

    Result.init()

    # Benchmark serialization and deserialization for each configured format.
    for serializeType in TYPES_LIST:
        serializedData = serializeBenchmark(args.iterations, serializeType, data)
        deserializedData = deserializeBenchmark(args.iterations, serializeType, serializedData)

    Result.report()
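The serializeBenchmark and deserializeBenchmark helpers that main() calls are not part of this snippet. A minimal sketch of what such a timing helper could look like, assuming each entry in TYPES_LIST exposes a dumps()-style callable and that Result.add() records a timing (both of those names are assumptions, not from the original):

import time

def serializeBenchmark(iterations, serializeType, data):
    # Hypothetical helper: time `iterations` serialization passes with the
    # given format and record the elapsed time under its name
    # (serializeType.dumps and Result.add are assumed interfaces).
    start = time.time()
    for _ in range(iterations):
        serializedData = serializeType.dumps(data)
    Result.add("serialize", serializeType, time.time() - start)
    return serializedData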
Example #2
#!/usr/bin/env python
#coding: utf-8

from matplotlib.pylab import *  # provides plot(), norm() and show()
# local modules
import generate_data
import perceptron

trainset = generate_data.generateData(80)  # train set generation
testset = generate_data.generateData(20)  # test set generation
p = perceptron.Perceptron()  # create a perceptron instance
p.train(trainset)

#Perceptron test
for x in testset:
    r = p.response(x)
    if r != x[2]:  # if the response is not correct
        print 'not hit.'
    if r == 1:
        plot(x[0], x[1], 'ob')
    else:
        plot(x[0], x[1], 'or')

# Plot the separation line.
# The center of the line is at the coordinate origin,
# so the length of the line is 2.
# The separation line is orthogonal to w.
n = norm(p.w)  # length of the weight vector p.w
ww = p.w / n  # unit vector in the direction of w
ww1 = [ww[1], -ww[0]]
ww2 = [-ww[1], ww[0]]
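The snippet stops before the boundary is actually drawn; a short assumed continuation that uses the two endpoints ww1 and ww2 computed above (the dashed-line style and the show() call are additions, not in the original):

# Draw the separation line between the two unit-length endpoints and show it.
plot([ww1[0], ww2[0]], [ww1[1], ww2[1]], '--k')
show()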
Example #3
def _setupScalarEncoder(minval, maxval):
  # Set min and max for scalar encoder params.
  SCALAR_ENCODER_PARAMS["minval"] = minval
  SCALAR_ENCODER_PARAMS["maxval"] = maxval


if __name__ == "__main__":
  
  for noiseAmplitude in WHITE_NOISE_AMPLITUDE_RANGES:
    
    expParams = "\nRUNNING EXPERIMENT WITH PARAMS: numRecords=%s | noiseAmplitude=%s | signalAmplitude=%s | signalMean=%s | signalPeriod=%s \n\n"\
          %(NUM_RECORDS, noiseAmplitude, SIGNAL_AMPLITUDE, SIGNAL_MEAN, SIGNAL_PERIOD)
    outFile.write(expParams)
    print expParams    
    
    # Generate the data, and get the min/max values
    generateData(whiteNoise=True, noise_amplitude=noiseAmplitude)
    inputFile = os.path.join(DATA_DIR, "white_noise_%s.csv" % noiseAmplitude)
    minval, maxval = findMinMax(inputFile)
  
    _setupScalarEncoder(minval, maxval)

    # Create and run network on this data.
    #   Input data comes from a CSV file (scalar values, labels). The
    #   RecordSensor region allows us to specify a file record stream as the
    #   input source via the dataSource attribute.
    dataSource = FileRecordStream(streamID=inputFile)
    encoders = {"white_noise": SCALAR_ENCODER_PARAMS}
    network = createNetwork((dataSource, "py.RecordSensor", encoders))

    # Need to init the network before it can run.
    network.initialize()
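
    # The listing stops after initialization. Assumed continuation: step the
    # network through every record in the input file (Network.run(n) advances
    # the network n iterations; NUM_RECORDS comes from the surrounding script).
    for _ in xrange(NUM_RECORDS):
      network.run(1)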
Example #4
parser.add_option("-r",
                  "--run_default",
                  action="store_true",
                  dest="runDefault",
                  default=False,
                  help="run default run (detection mail")

(options, args) = parser.parse_args()

PATH = os.path.dirname(os.path.realpath(__file__))

if options.genData:
    lFRA = PATH + "/" + options.learnF
    lENG = PATH + "/" + options.learnE
    out = PATH + "/" + options.outputR
    generateData(lFRA, lENG, out)

if options.stats:
    lFRA = PATH + "/" + options.learnF
    lENG = PATH + "/" + options.learnE
    repertory = PATH + "/" + options.inputR
    out = PATH + "/" + options.outputR
    maxn = int(options.m)
    learning_stats(repertory, out, lFRA, lENG, maxn)
    os.chdir(PATH)

if options.detect:
    lFRA = PATH + "/" + options.learnF
    lENG = PATH + "/" + options.learnE
    repertory = PATH + "/" + options.inputR
    out = PATH + "/" + options.outputR
  importName = "model_params.%s_model_params" % metricName
    
  print "Importing model params from %s" % importName
  try:
    importedModelParams = importlib.import_module(importName).MODEL_PARAMS
  except ImportError:
    raise Exception("No model params exist for '%s'" % importName)
  return importedModelParams

if __name__ == "__main__":

  accuracyResults = [['signal_type', 'classification_accuracy']]

  for noiseAmplitude in WHITE_NOISE_AMPLITUDE_RANGES:
    # generate data
    generateData(dataDir=DATA_DIR, whiteNoise=True, noise_amplitude=noiseAmplitude)
    
    # generate model params
    signalType = 'white_noise'
    fileName = '%s/%s_%s.csv' % (DATA_DIR, signalType, noiseAmplitude)
    modelParamsName = '%s_model_params' % signalType
    createModelParams(MODEL_PARAMS_DIR, modelParamsName, fileName)

    # train and classify
    dataPath = "%s_%s.csv" % (signalType, noiseAmplitude)
    resultsPath = "%s/%s.csv" % (RESULTS_DIR, signalType)
    model = createModel(signalType)
    trainAndClassify(model, dataPath, resultsPath)
    
    # classification accuracy
    classificationAccuracy = computeClassificationAccuracy(resultsPath)
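
    # classificationAccuracy is computed above but never stored in this
    # snippet. Assumed continuation: record this noise level's accuracy in
    # the accuracyResults table initialized before the loop.
    accuracyResults.append(["white_noise_%s" % noiseAmplitude,
                            classificationAccuracy])

  # After the loop, write the collected rows out (the summary filename below
  # is hypothetical, not from the original).
  with open("%s/accuracy_summary.csv" % RESULTS_DIR, "w") as f:
    for row in accuracyResults:
      f.write("%s,%s\n" % (row[0], row[1]))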
Example #6
def getModelParamsFromName(metricName):
  importName = "model_params.%s_model_params" % metricName
  print "Importing model params from %s" % importName
  try:
    importedModelParams = importlib.import_module(importName).MODEL_PARAMS
  except ImportError:
    raise Exception("No model params exist for '%s'" % importName)
  return importedModelParams
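
# createModel() is called below but not defined in this snippet. The sketch
# that follows is an assumption of how it might wrap getModelParamsFromName()
# using NuPIC's OPF ModelFactory (the import path differs between NuPIC
# versions, and the predicted field name is a guess).
def createModel(metricName):
  from nupic.frameworks.opf.modelfactory import ModelFactory
  modelParams = getModelParamsFromName(metricName)
  model = ModelFactory.create(modelParams)
  # Enable inference on the metric's own field (assumed).
  model.enableInference({"predictedField": metricName})
  return model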

if __name__ == "__main__":

  # generate data
  generateData(whiteNoise=False, signal_mean=SIGNAL_MEAN, signal_amplitude=SIGNAL_AMPLITUDE)
  generateData(whiteNoise=True, signal_mean=SIGNAL_MEAN,
               signal_amplitude=SIGNAL_AMPLITUDE,
               noise_amplitude=WHITE_NOISE_AMPLITUDE)


  accuracyResults = [['signal_type', 'classification_accuracy']]

  for signalType in SIGNAL_TYPES:
    # generate model params
    fileName = '%s/%s.csv' % (DATA_DIR, signalType)
    modelParamsName = '%s_model_params' % signalType
    createModelParams(MODEL_PARAMS_DIR, modelParamsName, fileName)

    # train and classify
    dataPath = "%s.csv" % signalType
    resultsPath = "%s/%s.csv" % (RESULTS_DIR, signalType)
    model = createModel(signalType)
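
    # The loop is cut off after createModel() in this listing. Based on the
    # other examples in this listing, it presumably continues by training,
    # classifying and scoring (assumed continuation):
    trainAndClassify(model, dataPath, resultsPath)
    accuracyResults.append([signalType,
                            computeClassificationAccuracy(resultsPath)])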
Example #7
def getModelParamsFromName(metricName):
  importName = "model_params.%s_model_params" % metricName
  print "Importing model params from %s" % importName
  try:
    importedModelParams = importlib.import_module(importName).MODEL_PARAMS
  except ImportError:
    raise Exception("No model params exist for '%s'" % importName)
  return importedModelParams

if __name__ == "__main__":

  # generate data
  generateData(whiteNoise=False, signal_mean=10, signal_amplitude=1)
  generateData(whiteNoise=True, signal_mean=10, signal_amplitude=1,
               noise_amplitude=WHITE_NOISE_AMPLITUDE)

  for signalType in SIGNAL_TYPES:
    # generate model params
    fileName = '%s/%s.csv' % (DATA_DIR, signalType)
    modelParamsName = '%s_model_params' % signalType
    createModelParams(MODEL_PARAMS_DIR, modelParamsName, fileName)

    dataPath = "%s.csv" % signalType
    resultsPath = "%s/%s.csv" % (RESULTS_DIR, signalType)

    model = createModel(signalType)
    trainAndClassify(TRAINING_SET_SIZE, model, dataPath, resultsPath)
    computeClassificationAccuracy(resultsPath)
Example #8
parser.add_option("-r", "--run_default", action="store_true", dest="runDefault", default=False, help="run default run (detection mail")

(options, args) = parser.parse_args()

PATH = os.path.dirname(os.path.realpath(__file__))

if options.genData:
    lFRA = PATH + "/" + options.learnF
    lENG = PATH + "/" + options.learnE
    out = PATH + "/" + options.outputR
    generateData(lFRA, lENG, out)

if options.stats:
    lFRA = PATH + "/" + options.learnF
    lENG = PATH + "/" + options.learnE
    repertory = PATH + "/" + options.inputR
    out = PATH + "/" + options.outputR
    maxn = int(options.m)
    learning_stats(repertory, out, lFRA, lENG, maxn)
    os.chdir(PATH)

if options.detect:
    lFRA = PATH + "/" + options.learnF
    lENG = PATH + "/" + options.learnE
    repertory = PATH + "/" + options.inputR
    out = PATH + "/" + options.outputR