Example #1
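# Fetches the CAPTCHA at `url` up to 15 times and majority-votes (at least
# 3 votes) on each of the six character positions. Assumes Python 2 with
# urllib, collections, numpy as np, PIL's Image and ImageFileIO imported,
# and loadnet/loadimg/getbits/alphabet provided by the surrounding module.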
def recognize(url):
  net = loadnet("ffnet.net")
  
  all_answers = []
  n_iter = 0
  while True:
    n_iter = n_iter + 1
    if n_iter > 15: break
    print "iteration ", n_iter

    stream = ImageFileIO.ImageFileIO(urllib.urlopen(url))
    pat = loadimg(stream)
    if len(pat) != 6:
      continue

    answer = ""
    for letter in pat:
      arr = np.array(getbits(letter))
      answer += alphabet[net(arr).argmax()]
    all_answers.append(answer)
        
    result = []
    for x in range(0,6):
      what, count = collections.Counter([a[x] for a in all_answers]).most_common(1)[0]
      if count < 3: break # majority is 3 votes
      result.append(what)

    if len(result) == 6:
      print result
      break

  print all_answers
  Image.open(ImageFileIO.ImageFileIO(urllib.urlopen(url))).show()
Example #2
def _main(argv):
    import sys  # needed for sys.stderr below
    from ffnet import loadnet
    from utils import import_letter
    if len(argv) < 3:
        print >> sys.stderr, 'Usage: {0} network_file.net letter_file.png'.format(argv[0])
        exit(1)
    net = loadnet(argv[1])
    ipt = import_letter(argv[2])
    output = net(ipt)
    letter = output.argmax()
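    # output indices 0..25 correspond to the letters 'A'..'Z'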
    print chr(65+letter)
Example #3
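# Replays a metrics trace through a trained response-time model: while the
# predicted RT stays above 100 (ms, per Example #24's logging) more workers
# are suggested (k += 1); while the measured RT is below 70 the count is
# reduced. estimateMetrics() is assumed to rescale the metrics for w + k
# workers, as the commented-out line suggests.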
def main():
    # load network
    net = loadnet('expData/rt_rr_statwosysdsk.network')
    timesSuggested = 0
    loglines = open('expData/partTrace.csv')
    for line in loglines:
        metrics = line.split(',')
        rt = float(metrics[1])
        w = int(metrics[2])
        carr = [float(metrics[i]) for i in [3, 4, 6, 7, 10, 11, 12, 13]]
        inputs = numpy.array(carr)
        o, r = net.test([inputs], [100], iprint=0)
        print "Avg. RT: " + str(rt) + "  Workers: " + str(
            w) + " Predicted RT: " + str(o[0][0])
        print "Request rate and stat: ",
        for i in carr:
            print i,
        print

        k = 0
        while rt > 100:
            #			inputs=numpy.array(carr)*(float(w)/float(w+k))
            inputs = numpy.array(estimateMetrics(carr, w, k))
            print "\t\tEstimates: ",
            for i in inputs:
                print i,
            print
            o, r = net.test([inputs], [100], iprint=0)
            rt = o[0][0]
            print "\t\tPredicted RT:", rt, " Workers: ", w + k
            if rt < 100:
                break
            k += 1

        rt = float(metrics[1])
        while rt < 70 and w > 1:
            k -= 1
            #			inputs=numpy.array(carr)*(float(w)/float(w+k))
            inputs = numpy.array(estimateMetrics(carr, w, k))
            print "\t\tEstimates: ",
            for i in inputs:
                print i,
            print
            o, r = net.test([inputs], [100], iprint=0)
            rt = o[0][0]
            print "\t\tPredicted RT:", rt, " Workers: ", w + k
            if rt > 70:
                k += 1
                break
            if w + k == 1:
                break
        if k != 0:
            print "\tSuggested k", k
Example #4
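# Same recognition pipeline as Example #1, but for a local five-symbol
# image; loadimg, drawletter, getbits and recognizeletter are project
# helpers, and captcha.net is the saved network.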
def recognize(imgpath):
    net = loadnet("captcha.net")
    pics = loadimg(imgpath)
    if len(pics) != 5:
        print "skipping, recognized symbols:" + str(len(pics))
        return

    res = ""
    for letter in pics:
        inputbits = getbits(drawletter(letter))
        res = res + recognizeletter(net, inputbits)
    print res
Example #5
File: captcha.py Project: Greyh4t/Captcha
def recognize(imgpath):
  net = loadnet("captcha.net")
  pics = loadimg(imgpath)
  if len(pics) != 5:
    print "skipping, recognized symbols:" + str(len(pics))
    return

  res = ""
  for letter in pics:
    inputbits = getbits(drawletter(letter))
    res = res + recognizeletter(net, inputbits)
  print res
Example #6
def import_network(ANNname, Nlayers, Nneurons):
    from ffnet import loadnet
    import os
    try:
        f = '{0:}_{1:}_{2:}'.format(ANNname, Nlayers, Nneurons)
        net = loadnet(f)
        print 'Loaded network from file: ', f
        print 'Network limits:', net.inlimits
        return net

    except Exception, e:
        print 'Error when loading ffnet network file', e
        raise e
Example #7
    def __init__(self):
        super(NeuralNetwork, self).__init__()
        self.field = Field(20, 20)

        self.outputs = []

        self.input = []
        self.target = []

        b = QtGui.QPushButton("Learn!")
        self.connect(b, QtCore.SIGNAL("clicked()"), self.learn)

        self.outcomes_list = QtGui.QComboBox()
        self._add_output("Square")
        self._add_output("Triangle")
        self._add_output("Line")

        hpanel = QtGui.QHBoxLayout()
        hpanel.addWidget(self.outcomes_list)
        hpanel.addWidget(b)

        btn_classify = QtGui.QPushButton("Classify")
        self.connect(btn_classify, QtCore.SIGNAL("clicked()"), self.classify)

        btn_clear = QtGui.QPushButton("Clear")
        self.connect(btn_clear, QtCore.SIGNAL("clicked()"), self.clear)

        self.label_output = QtGui.QLabel()
        self.label_output.setMaximumHeight(20)

        self.label_epoch = QtGui.QLabel()
        self.label_epoch.setMaximumHeight(20)

        vpanel = QtGui.QVBoxLayout()
        vpanel.addWidget(self.field)
        vpanel.addLayout(hpanel)
        vpanel.addWidget(self.label_output)
        vpanel.addWidget(self.label_epoch)
        vpanel.addWidget(btn_classify)
        vpanel.addWidget(btn_clear)

        self.setLayout(vpanel)

        try:
            self.net, self.epoch = loadnet("netdata.dat")
        except IOError:
            conec = mlgraph((self.field.x * self.field.y, 10, 10, 3))
            self.net = ffnet(conec)
            self.epoch = 0
Example #8
    def __init__(self):
        super(NeuralNetwork, self).__init__()
        self.field = Field(20, 20)

        self.outputs = []

        self.input = []
        self.target = []

        b = QtGui.QPushButton("Learn!")
        self.connect(b, QtCore.SIGNAL("clicked()"), self.learn)

        self.outcomes_list = QtGui.QComboBox()
        self._add_output("Square")
        self._add_output("Triangle")
        self._add_output("Line")

        hpanel = QtGui.QHBoxLayout()
        hpanel.addWidget(self.outcomes_list)
        hpanel.addWidget(b)

        btn_classify = QtGui.QPushButton("Classify")
        self.connect(btn_classify, QtCore.SIGNAL("clicked()"), self.classify)

        btn_clear = QtGui.QPushButton("Clear")
        self.connect(btn_clear, QtCore.SIGNAL("clicked()"), self.clear)

        self.label_output = QtGui.QLabel()
        self.label_output.setMaximumHeight(20)

        self.label_epoch = QtGui.QLabel()
        self.label_epoch.setMaximumHeight(20)

        vpanel = QtGui.QVBoxLayout()
        vpanel.addWidget(self.field)
        vpanel.addLayout(hpanel)
        vpanel.addWidget(self.label_output)
        vpanel.addWidget(self.label_epoch)
        vpanel.addWidget(btn_classify)
        vpanel.addWidget(btn_clear)

        self.setLayout(vpanel)

        try:
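            # loadnet() returns whatever object was passed to savenet() -- here a (net, epoch) tuple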
            self.net, self.epoch = loadnet("netdata.dat")
        except IOError:
            conec = mlgraph((self.field.x * self.field.y, 10, 10, 3))
            self.net = ffnet(conec)
            self.epoch = 0
Example #9
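# Runs each labelled sample through the network and reports recognition
# precision; `data` is assumed to be a list of (input_bits, target_vector)
# pairs with the target's maximum at the correct letter's index.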
def verify(data):
  print "loading ffnet"
  net = loadnet("ffnet.net")

  success = 0
  print "verifying %d samples" % len(data)

  for inp,out in data:
    arr = np.array(inp)
    result = net(arr)
    expected = alphabet[out.index(max(out))]
    recognized = alphabet[result.argmax()]
    if expected == recognized:
      success = success + 1
  print "%d of %d recognized, precision is %f" % (success, len(data), success / (len(data) * 1.0))
예제 #10
0
    def save_network(self):
        # Save/load/export network
        from ffnet import savenet, loadnet, exportnet

        print "Network is saved..."
        savenet(self.net, "xor.net")
        print "Network is reloaded..."
        net = loadnet("xor.net")
        print "Network is tested again, but nothing is printed..."
        output, regression = net.test(input, target, iprint=0)
        print
        print "Exporting trained network to the fortran source..."
        exportnet(net, "xor.f")
        print "Done..."
        print "Look at the generated xor.f file."
        print "Note: you must compile xor.f along with the ffnet.f"
        print "file which can be found in ffnet sources."
Example #11
def load_speaker_recognition_newtork(filename, create_new=False):
    """
    Load or create (if create_new is set) a network for speaker recognition from a file.

    returns tuple: (network, people_names, people_number)
    """
    people = voice_sample.get_names()
    people_num = len(people)
    network = None

    try:
        network = loadnet(filename)
    except IOError, ex:
        if create_new:
            
            network = ffnet(mlgraph((LPC_COE_NUM, people_num + LPC_COE_NUM,
            #network = ffnet(mlgraph((LPC_COE_NUM, 10,
                people_num)) )
Example #12
File: ffnetui.py Project: mrkwjc/ffnetui
def test():
    app = FFnetApp()
    # Add test network
    from ffnet import loadnet

    n = app.network
    path = "data/testnet.net"
    n.net = loadnet(path)
    n.filename = path
    ## Add test data
    app.data.input_loader.filename = "data/black-scholes-input.dat"
    app.data.target_loader.filename = "data/black-scholes-target.dat"
    app.data.load()
    app.mode = "train"
    app._arrange_plots()
    # Run
    app.configure_traits()
    return app
Example #13
def load_speaker_recognition_newtork(filename, create_new=False):
    """
    Load or create (if create_new is set) a network for speaker recognition from a file.

    returns tuple: (network, people_names, people_number)
    """
    people = voice_sample.get_names()
    people_num = len(people)
    network = None

    try:
        network = loadnet(filename)
    except IOError, ex:
        if create_new:

            network = ffnet(
                mlgraph((
                    LPC_COE_NUM,
                    people_num + LPC_COE_NUM,
                    #network = ffnet(mlgraph((LPC_COE_NUM, 10,
                    people_num)))
Example #14
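# Scatter-plots network output against look-up-table targets for one output
# coefficient, coloured by relative error (capped at 110%); Getting_Arrays
# and RMSE are helpers defined elsewhere in this project.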
def Plot_Scatter_ANN_LUT(net_file, LUT_file, coeff, dir2dir=False):

    import numpy as np
    import matplotlib.pyplot as plt
    import ffnet as ff

    LUT = Getting_Arrays(LUT_file)
    net = ff.loadnet(net_file)

    src = LUT['src']
    trg = LUT['T'] if dir2dir else LUT['S']

    net_out = net(src)
    rel_error = np.minimum(
        np.abs(
            (net_out[:, coeff] - trg[:, coeff]) / trg[:, coeff]), 1.1) * 100.0

    rmse, rel_rmse = RMSE(net_out[:, coeff], trg[:, coeff])

    plt.plot([-0.1, 1.1], [-0.1, 1.1], color='black')
    plt.scatter(net_out[:, coeff],
                trg[:, coeff],
                s=3,
                cmap='RdYlGn_r',
                c=rel_error,
                lw=0.0)
    ticks = np.linspace(0.0, 100.0, 11)
    cb = plt.colorbar(ticks=ticks)
    cb.set_label('rel. error [%]')

    plt.xlim([-0.1, 1.1])
    plt.ylim([-0.1, 1.1])
    plt.title('%d    ::   %f   ::   %f' % (coeff, rmse, rel_rmse))
    plt.grid()

    return
Example #15
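# Command-line dispatcher for the letter-recognition network. Subcommands:
# train, regress, letter (plot the net's response to one A-Z pattern),
# test (sweep noise levels) and compare; USAGE and the load/plot helpers
# are defined elsewhere in the module.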
def _main(argv):
    if len(argv) < 3:
        print >> sys.stderr, USAGE.format(argv[0])
        exit(1)

    net_filename = argv[1]

    if argv[2] == 'train':
        if len(argv) < 4:
            print >> sys.stderr, USAGE.format(argv[0])
            exit(1)
        pattern_filename = argv[3]

        train_method = "momentum"
        if len(argv) > 4:
            train_method = argv[4]

        # number of train runs does not affect the network
        # trained_up_to_times = int(argv[4])
        trained_up_to_times = 1

        print("Will train networks on: {0}...".format(pattern_filename))
        create_then_save_network_trained_on(
                net_filename,
                pattern_filename,
                train_method)

    elif argv[2] == 'regress':
        input, output = load_snns('letters.pat')
        net = loadnet(net_filename)
        regression_analysis(net, input, output)

    elif argv[2] == 'letter':
        if len(argv) < 4:
            print >> sys.stderr, USAGE.format(argv[0])
            exit(1)
        input, _ = load_snns('letters.pat')
        net = loadnet(net_filename)
        letter = ord(argv[3][0])-65
        if letter < 0 or letter > 25:
            print >> sys.stderr, "Letter must be uppercase A-Z"
            exit(1)
        plot_net_output(net, input[letter])

    elif argv[2] == 'test':
        if len(argv) < 4:
            print >> sys.stderr, USAGE.format(argv[0])
            exit(1)

        max_noise_num = int(argv[3])

        input, target = load_snns('letters.pat')

        # Load net
        net = loadnet(net_filename)
        # plot and save results
        print("Plotting results...")
        plot_save_regressions(
                compute_regressions_for_noise_amount(net, input, target, max_noise_num),
                net_filename)
    elif argv[2] == "compare":
        if len(argv) < 5:
            print >> sys.stderr, USAGE.format(argv[0])
            exit(1)
        input, target = load_snns('letters.pat')

        filenames = [argv[1]] + argv[4:]
        max_noise_num = int(argv[3])
        plot_save_regressions_compare(filenames, input, target, max_noise_num)
Example #16
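This excerpt starts mid-script, so `net`, `input` and `target` are already defined. A minimal sketch of the assumed preamble, following the stock ffnet XOR example (the (2, 2, 1) topology is an assumption; the `target` pattern matches Example #17 below):

from ffnet import ffnet, mlgraph

# two inputs, one hidden layer of two neurons, one output (assumed topology)
conec = mlgraph((2, 2, 1))
net = ffnet(conec)

# XOR-style training patterns; `target` as given in Example #17
input = [[0., 0.], [0., 1.], [1., 0.], [1., 1.]]
target = [[1.], [0.], [0.], [1.]]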
# Train network
#first find good starting point with genetic algorithm (not necessary, but may be helpful)
print "FINDING STARTING WEIGHTS WITH GENETIC ALGORITHM..."
net.train_genetic(input, target, individuals=20, generations=500)
#then train with scipy tnc optimizer
print "TRAINING NETWORK..."
net.train_tnc(input, target, maxfun=1000, messages=1)

# Test network
print
print "TESTING NETWORK..."
output, regression = net.test(input, target, iprint=2)

# Save/load/export network
from ffnet import savenet, loadnet, exportnet

print "Network is saved..."
savenet(net, "xor.net")
print "Network is reloaded..."
net = loadnet("xor.net")
print "Network is tested again, but nothing is printed..."
output, regression = net.test(input, target, iprint=0)
print
print "Exporting trained network to the fortran source..."
exportnet(net, "xor.f")
print "Done..."
print "Look at the generated xor.f file."
print "Note: you must compile xor.f along with the ffnet.f"
print "file which can be found in ffnet sources."
Example #17
File: xor.py Project: cephdon/meta-core
target  = [[1.], [0.], [0.], [1.]]

# Train network
#first find good starting point with genetic algorithm (not necessary, but may be helpful)
print "FINDING STARTING WEIGHTS WITH GENETIC ALGORITHM..."
net.train_genetic(input, target, individuals=20, generations=500)
#then train with scipy tnc optimizer
print "TRAINING NETWORK..."
net.train_tnc(input, target, maxfun = 1000, messages=1)

# Test network
print
print "TESTING NETWORK..."
output, regression = net.test(input, target, iprint = 2)

# Save/load/export network
from ffnet import savenet, loadnet, exportnet
print "Network is saved..."
savenet(net, "xor.net")
print "Network is reloaded..."
net = loadnet("xor.net")
print "Network is tested again, but nothing is printed..."
output, regression = net.test(input, target, iprint = 0)
print
print "Exporting trained network to the fortran source..."
exportnet(net, "xor.f")
print "Done..."
print "Look at the generated xor.f file."
print "Note: you must compile xor.f along with the ffnet.f" 
print "file which can be found in ffnet sources."
Example #18
def plot_save_regressions_compare(net_filenames, input, target, max_noise_amount):
    from pylab import (imshow,plot,legend,xticks,ylim,xlim,axhline,title,
            xlabel,ylabel,arange,show,cm,figure,savefig,save,imsave)
    names = [ n.rsplit('.',1)[0].rsplit('_',1).pop() for n in net_filenames ]
    networks = [ loadnet(filename) for filename in net_filenames ]
    regressions = [ compute_regressions_for_noise_amount(
        net, input, target, max_noise_amount) for net in networks ]


    # how many noise levels we have to draw
    N = max_noise_amount+1
    print("Will plot for for {} noise levels...".format(N))

    ind = arange(N)   # the x locations for the groups
    print("ind = {}".format(ind))
    width = 0.35       # the width of the bars

#    projection id -> name, as returned into tuples by http://ffnet.sourceforge.net/apidoc.html#ffnet.ffnet.test
    y_name = ["slope",
        "intercept",
        "r-value",
        "p-value",
        "slope stderr",
        "estim. stderr"]

    for projection_id in range(6): # todo has bug? how do i select the data
        #subplot(11 + projection_id * 100) # a new plot
        figure()

        projection_name = y_name[projection_id]
        ylabel(projection_name)
        ylim(0,1)
        print("Plotting for projection: " + projection_name)

        title(projection_name + " for noise levels...") # todo change me?

        for name, regress in zip(names, regressions):
            projections = regress.T[projection_id]
            print("Projections on {} tuple field ({}) = {}".format(
                projection_id, projection_name, projections))
            plot(ind, projections, label = name)

#      for i in ind:
#          bar(i, projections[i], width, color='b') # plot it
#        bar(ind, projections[ind], width, color='b') # plot it



        xticks(ind, range(0, N)) # todo print noise levels
        xlim(0,N-1)
        axhline(linewidth=1, color='black')
        xlabel("Noise amount")
        legend()

#        debug uncomment to look at graphs
#        show()
        plot_output_formats = ['png', 'eps']
        for format in plot_output_formats:
            plot_name = re_sub(
                    "[^a-z]",
                    "_",
                    y_name[projection_id].lower() )

            plot_filename = "compare_plot_{}.{}".format(
                    plot_name,
                    format)
            savefig(plot_filename, orientation='portrait')
            print("Saved plot as: {}.".format(plot_filename))
Example #19
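# Excerpt from a Morse-decoding pipeline (the enclosing loop over messages
# `m` is cut off above): feature rows are stacked into X_train/Y_train and
# trained with TNC, or a saved morse.net is reloaded; predictions are then
# written in sampleSubmission.csv order.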
        input_data, certainty = get_nnet_input_data(m)
        output_data = np.array(binary_solution, dtype=np.float32)

        if X_train is None:
            X_train = input_data
            Y_train = output_data
        else:
            X_train = np.concatenate((X_train, input_data), axis=0)
            Y_train = np.concatenate((Y_train, output_data), axis=0)

    net.train_tnc(X_train, Y_train, nproc=4, maxfun=200000, messages=2)
    savenet(net, "morse.net")

else:
    net = loadnet("morse.net")

## ===============================
## GENERATE SUBMISSION
## ===============================
with open('sampleSubmission.csv') as f:
    trainingset = f.read().split("\n")

files = [(('000' + x.split(",")[0])[-3:], x.split(",")[1])
         for x in trainingset[1:] if "," in x]

f = open('submission.csv', 'w')
f.write("ID,Prediction\n")

for filenum, m.text in files:
Example #20
    def load(self, filename):
        self.network = loadnet(filename)
Example #21
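# Sweeps one look-up-table dimension (`dim`) while holding the others fixed
# at const_val, then compares the network's prediction against the
# tabulated coefficients; dim_net, net_res, var and LUT come from earlier
# in the script.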
ind_net = np.linspace(0, len(dim_LUT) - 1, num=net_res)
val, ind = [], []
for x in var:
    if x in LUT:
        if x == dim:
            val.append(dim_net)
            ind.append(ind_net)
        else:
            val.append(
                np.ones((len(dim_net), ), dtype=float) * LUT[x][const_val[x]])
            ind.append(
                np.ones((len(dim_net), ), dtype=float) * float(const_val[x]))

val, ind = np.array(val).T, np.array(ind).T

net = ff.loadnet(net_file)
net_out = net(ind)
coef_name = 'T' if dir2dir else 'S'
trg = LUT[coef_name].reshape(
    (len(LUT['dz']), len(LUT['kabs']), len(LUT['ksca']), len(LUT['g']),
     LUT[coef_name].shape[1]))
err = LUT[coef_name + '_tol'].reshape(
    (len(LUT['dz']), len(LUT['kabs']), len(LUT['ksca']), len(LUT['g']),
     LUT[coef_name].shape[1]))

if dim == 'dz':
    trg = trg[:, const_val['kabs'], const_val['ksca'], const_val['g'], :]
    err = err[:, const_val['kabs'], const_val['ksca'], const_val['g'], :]
elif dim == 'kabs':
    trg = trg[const_val['dz'], :, const_val['ksca'], const_val['g'], :]
    err = err[const_val['dz'], :, const_val['ksca'], const_val['g'], :]
Example #22
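# Serialises an ffnet network (weights, connectivity, scaling arrays) plus
# optional parameter-space arrays into a NetCDF4 file; everything is
# transposed so a Fortran-ordered reader sees the expected layout
# (assumption based on the comment below).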
def ANN_to_NetCDF(net, out_file, iprint=True, **data):
    import netCDF4 as nc
    import numpy as np
    import ffnet as ff

    if iprint:
        print 'Exporting Network "{}" to .nc file :: {}'.format(net, out_file)
    if isinstance(net, str):
        network = ff.loadnet(net)
    else:
        network = net

    # transpose arrays to fortran order
    Tweights = network.weights.T
    Tconec = network.conec.T
    Tunits = network.units.T
    Tinno = network.inno.T
    Toutno = network.outno.T
    Teni = network.eni.T
    Tdeo = network.deo.T
    Tinlimits = network.inlimits.T
    for key, val in data.iteritems():
        data[key] = val.T

    dataset = nc.Dataset(out_file, 'w', format='NETCDF4')

    dataset.createDimension('weights_dim1', np.shape(Tweights)[0])
    dataset.createDimension('conec_dim1',
                            np.shape(Tconec)[0])
    dataset.createDimension('conec_dim2',
                            np.shape(Tconec)[1])
    dataset.createDimension('units_dim1', np.shape(Tunits)[0])
    dataset.createDimension('inno_dim1', np.shape(Tinno)[0])
    dataset.createDimension('outno_dim1', np.shape(Toutno)[0])
    dataset.createDimension('eni_dim1',
                            np.shape(Teni)[0])
    dataset.createDimension('eni_dim2',
                            np.shape(Teni)[1])
    dataset.createDimension('deo_dim1',
                            np.shape(Tdeo)[0])
    dataset.createDimension('deo_dim2',
                            np.shape(Tdeo)[1])
    dataset.createDimension('inlimits_dim1',
                            np.shape(Tinlimits)[0])
    dataset.createDimension('inlimits_dim2',
                            np.shape(Tinlimits)[1])
    for key, val in data.iteritems():
        dataset.createDimension('pspace.{}_dim1'.format(key),
                                np.shape(data[key])[0])

    weights = dataset.createVariable('weights', 'f8', 'weights_dim1')
    conec = dataset.createVariable('conec', 'i', ('conec_dim1', 'conec_dim2'))
    units = dataset.createVariable('units', 'f8', 'units_dim1')
    inno = dataset.createVariable('inno', 'i', 'inno_dim1')
    outno = dataset.createVariable('outno', 'i', 'outno_dim1')
    eni = dataset.createVariable('eni', 'f8', ('eni_dim1', 'eni_dim2'))
    deo = dataset.createVariable('deo', 'f8', ('deo_dim1', 'deo_dim2'))
    inlimits = dataset.createVariable('inlimits', 'f8',
                                      ('inlimits_dim1', 'inlimits_dim2'))
    dataset_list = {}
    for key, val in data.iteritems():
        dataset_list[key] = dataset.createVariable(
            'pspace.{}'.format(key), 'f8', 'pspace.{}_dim1'.format(key))

    weights[:] = Tweights
    conec[:] = Tconec
    units[:] = Tunits
    inno[:] = Tinno
    outno[:] = Toutno
    eni[:] = Teni
    deo[:] = Tdeo
    inlimits[:] = Tinlimits
    for key, var in dataset_list.iteritems():
        var[:] = data[key]

    dataset.close()

    return
Example #23
        input_data, certainty = get_nnet_input_data(m)
        output_data = np.array(binary_solution, dtype=np.float32)

        if X_train is None:
            X_train = input_data
            Y_train = output_data
        else:
            X_train = np.concatenate((X_train, input_data), axis=0)
            Y_train = np.concatenate((Y_train, output_data), axis=0)

    net.train_tnc(X_train, Y_train, nproc=4, maxfun=200000, messages=2)
    savenet(net, "morse.net")

else:
    net = loadnet("morse.net")

## ===============================
## GENERATE SUBMISSION
## ===============================
with open('sampleSubmission.csv') as f:
    trainingset = f.read().split("\n")

files = [(('000' + x.split(",")[0])[-3:], x.split(",")[1]) for x in trainingset[1:] if "," in x]

f = open('submission.csv','w')
f.write("ID,Prediction\n")

for filenum, m.text in files:

    if len(m.text) > 0:
Example #24
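# Live autoscaler: follows the Apache access log, aggregates response times
# over ten-second windows (the truncated timestamp in the regex), asks the
# trained net for the RT expected with w + k workers, and only acts after
# the same suggestion occurs three times. follow(), workerInit(),
# addWorker(), removeWorker() and estimateMetrics() are project helpers.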
def main():
    # load network
    net = loadnet('prcntrt_rr_statwosysdsk.network')
    plog = open('predict.log', 'w')
    metriclog = open('metric.log', 'wb')
    mlog = csv.writer(metriclog)
    scalelog = open('scale.log', 'w')
    accesslog = open('/var/log/apache2/access.log', 'r')
    loglines = follow(accesslog)
    first = True
    cts = -1
    RT = 0
    RTs = []
    N = 0
    timesSuggested = 0
    workerStatus = workerInit()
    w = sum(workerStatus.values())
    #	exit(0)
    for line in loglines:
        try:
            if first:
                matches = re.search(
                    '.*:([0-9]*:[0-9]*:[0-9])[0-9] .* ([0-9]*)', line)
                cts = matches.group(1)
                RT = float(matches.group(2)) / 1000.
                RTs.append(RT)
                N = 1
                first = False
            else:
                #			print line
                matches = re.search(
                    '.*:([0-9]*:[0-9]*:[0-9])[0-9] .* ([0-9]*)', line)
                ts = matches.group(1)
                if cts == ts:
                    RTs.append(float(matches.group(2)) / 1000.)
                    RT += float(matches.group(2)) / 1000.
                    N += 1
                elif cts < ts or (ts[0:7] == "00:00:0"
                                  and cts[0:7] == "23:59:5"):
                    rt = float(RT / N)
                    avgrt = rt
                    rr = N
                    p_95 = numpy.percentile(RTs, 95)
                    print "====== Average RT for ten second interval %s is %0.2f, 95th percentile is: %0.2f and RC is %d ======" % (
                        cts, rt, p_95, rr)
                    cts = ts
                    RT = float(matches.group(2)) / 1000.
                    N = 1
                    RTs = [RT]

                    #					statcmd= '''tail -n 10 stat | grep '[0-9]*:[0-9]*:[0-9]*' | sed 's/ \+/ /g' | cut -d ' ' -f 2-5,8- | awk '{for (i=1;i<=NF;i++){a[i]+=$i;}} END {for (i=1;i<=NF;i++){printf "%f ", a[i]/NR;}}' '''
                    statcmd = '''ssh -i %s %s@%s 'tail -n 10 stat' | grep '[0-9]*:[0-9]*:[0-9]*' | sed 's/ \+/ /g' | cut -d ' ' -f %s | awk '{for (i=1;i<=NF;i++){a[i]+=$i;}} END {for (i=1;i<=NF;i++){printf "%%f ", a[i]/NR;}}' '''
                    statavg = subprocess.check_output(
                        statcmd % (prv_key, usr, repWorker, stat_f_select),
                        shell=True)
                    #				print statavg

                    workerStatus = workerInit()
                    w = sum(workerStatus.values())

                    rt = p_95  # 95th percentile
                    print "+++ Testing for scale up +++"
                    k = 0
                    if rt > upperRT:
                        plog.write(
                            "%s Testing for SCALE UP, current percentile RT: %d\n"
                            % (ts, rt))
                    while rt > upperRT:
                        statarr = [float(i) for i in statavg.split()]
                        carr = [rr / 10] + [
                            statarr[i] for i in [0, 2, 3, 6, 7, 8, 9]
                        ]
                        metrics = numpy.array(estimateMetrics(carr, w, k))
                        #						print "Metrics",metrics
                        o, r = net.test([metrics], [upperRT], iprint=0)
                        rt = o[0][0]
                        print "Predicted RT: ", rt, " for workers: ", w + k
                        plog.write("\tPredicted RT:%d ms for %d workers\n" %
                                   (rt, w + k))
                        if rt < upperRT:
                            break
                        k += 1

                    rt = p_95
                    print "--- Testing for scale down ---"
                    if rt < lowerRT and w > 1:
                        plog.write(
                            "%s Testing for SCALE DOWN, current percentile RT: %d\n"
                            % (ts, rt))
                    while rt < lowerRT and w > 1:
                        k -= 1
                        statarr = [float(i) for i in statavg.split()]
                        #						print statarr
                        carr = [rr / 10] + [
                            statarr[i] for i in [0, 2, 3, 6, 7, 8, 9]
                        ]
                        metrics = numpy.array(estimateMetrics(carr, w, k))
                        #						print "Metrics",metrics
                        o, r = net.test([metrics], [lowerRT], iprint=0)
                        rt = o[0][0]
                        print "Predicted RT: ", rt, " for workers: ", w + k
                        plog.write("\tPredicted RT:%d ms for %d workers\n" %
                                   (rt, w + k))
                        if rt > upperRT:
                            k += 1
                            break
                        if w + k == 1:
                            break

                    # log to file
                    rt = p_95
                    statarr = [float(i) for i in statavg.split()]
                    carr = [rr / 10
                            ] + [statarr[i] for i in [0, 2, 3, 6, 7, 8, 9]]
                    #					carr=[rr/10]+statarr
                    #					mlog.write("Time:%s"%ts)
                    mlog.writerow([ts, p_95, avgrt, w] + carr)
                    metrics = numpy.array(carr)
                    o, r = net.test([metrics], [80], iprint=0)
                    pRT = o[0][0]  # calculating model RT
                    logstr = "%s\tAverage RT: %f\tPredicted RT: %f\tNo. of workers in use: %d\tSuggested k: %d\tTimes suggested: %d\n" % (
                        ts, rt, pRT, w, k, timesSuggested)
                    print "@LOG: " + logstr
                    scalelog.write(logstr)
                    scalelog.flush()
                    metriclog.flush()

                    if k > 0:
                        timesSuggested += 1
                        if timesSuggested > 2:
                            timesSuggested = 0
                            for t in range(0, k):
                                addWorker(workerStatus, scalelog)
                                workerStatus = workerInit()
                            w = sum(workerStatus.values())
                    elif k < 0:
                        timesSuggested += 1
                        if timesSuggested > 2:
                            timesSuggested = 0
                            #							for t in range(0,-k):
                            #								print "Removing worker",t+1
                            removeWorker(workerStatus, scalelog)  #
                            workerStatus = workerInit()  #
                            w = sum(workerStatus.values())
                    else:
                        timesSuggested = 0
        except Exception as e:
            traceback.print_exception(*sys.exc_info())
            print line

    scalelog.close()
    plog.close()
    metriclog.close()
Example #25
File: run_me.py Project: ryanmp/dlnn
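# Builds an MLP from network_config, trains it with TNC in increasing
# chunks while tracking weekly MAPE via calc_stats(), and snapshots the
# best network to best_net.n; module globals such as col_training_set,
# network_config and max_functions are assumed.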
def build_ffnet(sorted_data,training_set_size):

	logging.info('starting new run! -----------------------------')
	print 'defining network'

	from ffnet import ffnet, imlgraph, mlgraph, loadnet, savenet
	from time import time
	from multiprocessing import cpu_count
	import networkx
	import pylab

	#data_in_training2 = sorted_data[:training_set_size,10:-2].astype(float).tolist()
	data_target_training2 = [[i] for i in sorted_data[:training_set_size,0].astype(float)]

	new_data_in = sorted_data[:training_set_size,col_training_set[0]]
	for i in col_training_set[1:]:
		new_data_in = numpy.column_stack((new_data_in, sorted_data[:training_set_size,i]))
	data_in_training2 = new_data_in.astype(float).tolist()

	# Define net (large one)
	conec = mlgraph(network_config, biases=False) #skipping first 11 cols
	net = ffnet(conec)
	print 'saving initialized net'
	savenet(net, 'starting_net.n')
	#net = loadnet('starting_net.n') # this way we can init a complex net just once

	#print 'draw network'
	#networkx.draw_graphviz(net.graph, prog='dot')
	#pylab.show()

	graph_weekly(net, sorted_data,training_set_size) # just saving a pic

	logging.info('network built as: ' + str(network_config) )

	print "TRAINING NETWORK..."
	# there are many different training algos

	#net.train_rprop(data_in_training2, data_target_training2, a=1.9, b=0.1, mimin=1e-06, mimax=15.0, xmi=0.5, maxiter=max_functions, disp=1)
	###net.train_momentum(data_in_training2, data_target_training2, eta=0.2, momentum=0.1, maxiter=max_functions, disp=1)
	stats = []
	smallest_error = 1000
	total = 0
	try:
		for i in xrange(min_loops,max_loops):
			total += max_functions+i
			if total>max_total:
				break
			print 'training for:',max_functions+i, "total is:", total

			net.train_tnc(data_in_training2, data_target_training2, maxfun = max_functions+i, messages=1)
			#net.train_rprop(data_in_training2, data_target_training2, a=1.2, b=0.5, mimin=1e-06, mimax=50.0, xmi=0.1, maxiter=max_functions*20, disp=1)

			graph_weekly(net, sorted_data,training_set_size) # just saving a pic

			in0, out0, s1, s2, mape_weekly_all = calc_stats(net,sorted_data,training_set_size)
			stats.append((in0, out0,total, s1, s2, mape_weekly_all))
			#if out0<=(biggest/1.4) and in0>.7:
			#if out0<=(smallest_error/4) and in0>overfitting_threshold:
			#	print 'we hit overfitting threshold - breaking out early'
			#	break
			if mape_weekly_all < smallest_error: # found a new best
				smallest_error = mape_weekly_all
				savenet(net, 'best_net.n')
	except KeyboardInterrupt: # this way command-c just breaks out of this loop
		pass


	#net.train_cg(data_in_training2, data_target_training2, maxiter=max_functions, disp=1)
	#net.train_genetic(data_in_training2, data_target_training2, individuals=max_population, generations=max_functions)
	#net.train_bfgs(data_in_training2, data_target_training2, maxfun = max_functions, disp=1)
	stats = sorted(stats, reverse=True, key=lambda x: x[1])
	for i in stats:
		temp_string = ''
		for x in i:
			temp_string += str(x) + ','
		print temp_string

	net = loadnet('best_net.n')
	return net
Example #26
    def load(self, path):
        self.net = loadnet(path)
Example #27
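# Loads a saved network, or builds a new full/layered topology sized from
# the LUT source and target arrays, then shuffles the data into train and
# test splits; Getting_Arrays and Shuffle_2D_X are project helpers and
# args comes from argparse (assumption).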
#----------------------------------------------------
#################### main ###########################
#----------------------------------------------------

# load source and target array
LUT = [Getting_Arrays(LUT_file) for LUT_file in args.LUT]
if args.index:
    src = np.concatenate([xLUT['index'] for xLUT in LUT], axis=0)
    src = src.astype(float)
else:
    src = np.concatenate([xLUT['src'] for xLUT in LUT], axis=0)
trg = np.concatenate([xLUT[LUT_var] for xLUT in LUT], axis=0)

# initialize ANN
if type(args_ANN_setup) == str:
    net = ff.loadnet(args_ANN_setup)
    setup = "UNKNOWN"
else:
    num_inp_nodes, num_out_nodes = [src.shape[1]], [trg.shape[1]]
    setup = tuple(num_inp_nodes + args_ANN_setup + num_out_nodes)
    if args.full:
        conec = ff.tmlgraph(setup)
    else:
        conec = ff.mlgraph(setup)
    net = ff.ffnet(conec)

# build up a shuffled test and train array
ind = int(src.shape[0] * (1.0 - args.test_perc))
s_src, s_trg = Shuffle_2D_X(src, trg)
src_train, trg_train = s_src[:ind, :], s_trg[:ind, :]
src_test, trg_test = s_src[ind:, :], s_trg[ind:, :]