Code example #1
    filter_name = raw_input('Filter name (return for none): ')
    if filter_name == '':
        filter_name = None

    group = raw_input('Group name (return for none): ')
    if group == '':
        group = 'Image'
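    # Note: despite the prompt above, an empty response defaults the group to
    # 'Image' (presumably per-image scoring) rather than None.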

    results_table = raw_input('Results table name (return for none): ')
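    # Unlike filter_name, an empty results table name is passed through as '';
    # score() below presumably treats the empty string as "no results table".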

    logging.info('Loading properties file...')
    p = Properties.getInstance()
    p.LoadFile(props_file)
    logging.info('Loading training set...')
    ts = TrainingSet(p)
    ts.Load(ts_file)

    score(p,
          ts,
          nRules,
          filter_name,
          group,
          show_results=True,
          results_table=results_table,
          overwrite=False)

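    # Enter the wx event loop so the results window stays open; this assumes
    # `app` is a wx.App created earlier in the (truncated) script.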
    app.MainLoop()

    #
    # Kill the Java VM
Code example #2
    db = DBConnect.getInstance()
    dm = DataModel.getInstance()

    #    props = '/Volumes/imaging_analysis/2007_10_19_Gilliland_LeukemiaScreens/Screen3_1Apr09_run3/2007_10_19_Gilliland_LeukemiaScreens_Validation_v2_AllBatches_DuplicatesFiltered_FullBarcode_testSinglePlate.properties'
    #    ts = '/Volumes/imaging_analysis/2007_10_19_Gilliland_LeukemiaScreens/Screen3_1Apr09_run3/trainingvalidation3b.txt'
    props = '../Properties/nirht_area_test.properties'
    ts = '/Users/afraser/Desktop/MyTrainingSet3.txt'
    nRules = 5
    filter_name = 'MAPs'
    #    props = '/Users/afraser/Desktop/2007_10_19_Gilliland_LeukemiaScreens_Validation_v2_AllBatches_DuplicatesFiltered_FullBarcode.properties'
    #    ts = '/Users/afraser/Desktop/trainingvalidation3d.txt'
    #    nRules = 50
    #    filter_name = 'afraser_test'

    p = Properties.getInstance()
    p.LoadFile(props)
    trainingSet = TrainingSet(p)
    trainingSet.Load(ts)
    output = StringIO()
    print('Training classifier with ' + str(nRules) + ' rules...')
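    # Train a multi-class gentle-boosting classifier with nRules weak learners;
    # the StringIO object collects whatever textual output train() writes.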
    weaklearners = fastgentleboostingmulticlass.train(trainingSet.colnames,
                                                      nRules,
                                                      trainingSet.label_matrix,
                                                      trainingSet.values,
                                                      output)
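    # PerImageCounts applies the learned weak learners to every image in the
    # database (optionally restricted to the named filter) and returns one row
    # of per-class counts per image; this description is inferred from how the
    # result is used below.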
    table = PerImageCounts(weaklearners, filter_name=filter_name)
    table.sort()

    labels = ['table', 'image'] + list(trainingSet.labels) + list(
        trainingSet.labels)
    print(labels)
    for row in table:
Code example #3
File: image.py  Project: jessetvogel/NeuralNetwork
import glob, os
from PIL import Image
from trainingset import TrainingSet

# Declare directories
png_directory = "/Users/jessetvogel/Downloads/mnist_png"
nnts_directory = "/Users/jessetvogel/Desktop/data"

# Scan directories
for subdirectory in ["training", "testing"]:
    print "Scanning " + subdirectory + " directory ..."
    training_set = TrainingSet(1, 1, 0.0, 1.0, 0.0, 1.0, 28 * 28, 10)
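    # The TrainingSet constructor arguments above are kept as-is from the
    # project; the trailing 28 * 28 and 10 appear to be the input and output
    # sizes, while the meaning of the leading arguments is not documented here.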
    for i in range(10):
        print "Scanning for digit " + str(i)
        n = 0
        directory = png_directory + "/" + subdirectory + "/" + str(i)
        for file in os.listdir(directory):
            if file.endswith(".png"):
                img = Image.open(directory + "/" + file)
                output = [0.0] * 10
                output[i] = 1.0
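                # Scale the 8-bit grayscale pixels to [0, 1] and pair them with
                # the one-hot target vector built above for digit i.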
                training_set.add_sample([x / 255.0 for x in list(img.getdata())], output)
                img.close()
                n += 1
                if n == 25:
                    break
        print "Read " + str(n) + " images"

    print "Saving " + subdirectory + " ..."
    training_set.save(nnts_directory + "/" + subdirectory + ".nnts")
    print ""
Code example #4
    ##
    #group = raw_input('Group name (return for none): ')
    #if group=='':
    #group = 'Image'
    group = 'None'
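    # Note: this is the literal string 'None', not the Python None object.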

    ##
    #results_table = raw_input('Results table name (return for none): ')
    results_table = ''

    logging.info('Loading properties file...')
    p = Properties.getInstance()
    p.LoadFile(props_file)
    logging.info('Loading initial training set...')
    ts = TrainingSet(p)
    ts.Load(ts_file)
    logging.info('Loading ground truth training set...')
    gt = TrainingSet(p)
    gt.Load(gt_file)

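    # Score the initial training set against the ground-truth set; filter_name
    # and nRules are defined earlier in the (truncated) script.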
    score_objects(p,
                  ts,
                  gt,
                  nRules,
                  filter_name,
                  group,
                  show_results=True,
                  results_table=results_table,
                  overwrite=False)
Code example #5
from trainingset import TrainingSet

path = "tmp_file.nnts"

# Create training set
tset = TrainingSet(1, 1, 0.0, 1.0, 0.0, 1.0, 2, 1)
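# The constructor arguments mirror the MNIST example above; the final 2 and 1
# appear to be the number of inputs and outputs per sample (XOR: two inputs,
# one output).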
tset.add_sample([0.0, 0.0], [0.0])
tset.add_sample([0.0, 1.0], [1.0])
tset.add_sample([1.0, 0.0], [1.0])
tset.add_sample([1.0, 1.0], [0.0])
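# The four samples encode the XOR truth table: the output is 1.0 exactly when
# the two inputs differ.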

# Write training set
tset.save(path)

# # Clean up
# import os
# os.remove(path)