Example No. 1
# Assumes a project-local `model` module plus `parse_args` and `PubFactory` defined elsewhere.
from twisted.internet import endpoints, reactor

import model

def main():
    args = parse_args()

    print('Audio socket classifier')

    print("Restoring model: ", args.model)
    mdl = model.restore(args.model)

    if mdl is None:
        print("Can't classify without an existing model")
        return

    endpoints.serverFromString(reactor, "tcp:80").listen(PubFactory(mdl, args))
    reactor.run()
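
PubFactory is project-local and never shown on this page. As a minimal sketch, assuming it hands each connection a protocol that wraps the restored model (PubProtocol and its wiring are assumptions, not the project's actual code), a Twisted factory could look like:

from twisted.internet import protocol

class PubProtocol(protocol.Protocol):
    """Hypothetical per-connection handler around the restored model."""

    def __init__(self, mdl, args):
        self.mdl = mdl
        self.args = args

    def dataReceived(self, data):
        # Placeholder: feed the received bytes to the model and
        # write classification results back to the client.
        pass

class PubFactory(protocol.Factory):
    def __init__(self, mdl, args):
        self.mdl = mdl
        self.args = args

    def buildProtocol(self, addr):
        return PubProtocol(self.mdl, self.args)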
Example No. 2
# Assumes a project-local `model` module and a `parse_args` helper defined elsewhere.
import math
import struct

import numpy as np
import pyaudio

import model

def main():
    """
    Loads an existing model, opens audio input stream, classifies input
    """
    args = parse_args()

    print('Audio stream classifier')

    print("Restoring model: ", args.model)
    mdl = model.restore(args.model)

    if mdl is None:
        print("Can't classify data without an existing model.")
        return

    print("Opening audio input..")

    audio = pyaudio.PyAudio()
    stream = audio.open(format=pyaudio.paFloat32,
                        channels=1,
                        rate=args.sample_rate,
                        input=True,
                        frames_per_buffer=args.frame_size)

    label_a = label_b = ""

    if args.labels is not None:
        label_a = args.labels[0]
        label_b = args.labels[1]

    while True:
        # Peel off [frame_size] bytes from the audio stream
        stream_data = stream.read(args.frame_size)

        # Unpack the binary stream and expand
        data = struct.unpack("%df" % args.frame_size, stream_data)
        data = np.expand_dims([data], axis=2)

        avg = model.classify(mdl, data)

        steps = 20
        a = int(math.ceil(avg * steps))
        b = steps - a

        print(label_a + " [" + ("." * a) + "|" + ("." * b) + "] " + label_b +
              " - " + str(avg),
              end='\r')
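
The loop above never releases the audio device. A small variation using PyAudio's standard teardown calls exits cleanly on Ctrl-C:

try:
    while True:
        stream_data = stream.read(args.frame_size)
        # ... unpack and classify as above ...
except KeyboardInterrupt:
    stream.stop_stream()
    stream.close()
    audio.terminate()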
Example No. 3
import numpy as np
import model
from model import SET_HYPERPARAMETER

SET_HYPERPARAMETER("diffLatentSpace", 6)
data = np.load("./npz/diffs.npz")["arr_1"]
model = model.emptyModel("DIFF_17jan_ls6_g",
                         inputsShape=list(data.shape[1:]),
                         use="diff",
                         log=False)
model.restore("DIFF_17jan_ls6_f")
code = [[0, 0, 0, 0, 0, 0]]
result = model.generate(code, data[0:1])
print(result)
print(result.shape)
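
Since diffLatentSpace is 6, code is a 6-dimensional latent vector. A quick way to probe the space, reusing only the generate call shown above (sweeping one dimension is an illustrative assumption about how the latent code behaves):

# Vary the first latent dimension while holding the rest at zero.
for v in np.linspace(-2.0, 2.0, num=5):
    print(v, model.generate([[v, 0, 0, 0, 0, 0]], data[0:1]).shape)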
Example No. 4
# Fragment: `parser` (argparse), `model`, `LabelDB`, the tf/os imports and the
# remaining command-line flags are defined earlier in the original script.
parser.add_argument('--connected-components-threshold', type=float, default=0.05)
parser.add_argument('--width', type=int, default=768, help='input image width')
parser.add_argument('--height', type=int, default=1024, help='input image height')
opts = parser.parse_args()

# feed data through an explicit placeholder to avoid using tf.data
imgs = tf.placeholder(dtype=tf.float32, shape=(1, opts.height, opts.width, 3), name='input_imgs')

# restore model
model = model.Model(imgs,
                    is_training=False,
                    use_skip_connections=not opts.no_use_skip_connections,
                    base_filter_size=opts.base_filter_size,
                    use_batch_norm=not opts.no_use_batch_norm)
sess = tf.Session()
model.restore(sess, "ckpts/%s" % opts.run)

if opts.output_label_db:
  db = LabelDB(label_db_file=opts.output_label_db)
  db.create_if_required()
else:
  db = None

if opts.export_pngs:
  export_dir = "predict_examples/%s" % opts.run
  print("exporting prediction samples to [%s]" % export_dir)
  if not os.path.exists(export_dir):
    os.makedirs(export_dir)

# TODO: make this batched to speed it up for larger runs
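
The fragment ends before the prediction loop. A hedged sketch of one inference step, assuming the network exposes its prediction as a tensor attribute (model.output is an assumption, not this project's confirmed API):

import numpy as np

# Run a single zero image through the placeholder defined above.
img = np.zeros((1, opts.height, opts.width, 3), dtype=np.float32)
prediction = sess.run(model.output, feed_dict={imgs: img})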
Example No. 5
import model
import numpy as np
from model import SET_HYPERPARAMETER

SET_HYPERPARAMETER("contrast", 50.0)
SET_HYPERPARAMETER("diffLatentSpace", 30)
data = np.load("./npz/diffsWithNames.npz")
goods = data["arr_0"]
bads = data["arr_1"]
model = model.emptyModel("generateSmoothDiff",
                         inputsShape=list(goods[0].shape),
                         use="diff",
                         log=False)

model.restore("28jan_ls30")
trainDiffs = model.reproduce(goods)
testDiffs = model.reproduce(bads)
np.savez("./npz/smoothDiffsWithNames_ls30.npz", trainDiffs, testDiffs,
         data["arr_2"], data["arr_3"])
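
Because np.savez is called with positional arguments, NumPy stores the arrays under the generated keys arr_0, arr_1, and so on, which is why every script on this page indexes the archives that way:

out = np.load("./npz/smoothDiffsWithNames_ls30.npz")
print(out.files)  # ['arr_0', 'arr_1', 'arr_2', 'arr_3']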
Example No. 6
import numpy as np
import os
import jieba
import tensorflow as tf

# config, model and util are project-local modules.
import config
import model
import util

C = config.Config()
jieba.load_userdict('data/coal_dict.txt')

model = model.Model()
labels = util.labelGenerator()
compute_graph = tf.get_default_graph()

with compute_graph.as_default():
    model.lstm_crf_predict()  # Define the graph
    model.load_word2vec()  # Initialize all variables
    model.restore()
    trainHelper = util.trainHelper(model.word2vector)  # The train helper does the padding
    # for x,y in batchLoader:
    #     x_raw = x.copy()
    #     x,y,sequence_length,seq_max_len = trainHelper.generateBatch(x,y)
    #     viterbi_sequence = model.predict(x,sequence_length,seq_max_len)
    #     print(x_raw[0])
    #     print(labels.ID2label(viterbi_sequence[0][0]))
    while True:
        seg_X = input("Input your sentence: ").replace(' ', '').replace('\n', '')
        x = list(jieba.cut(seg_X))
        x_raw = x.copy()
        x, sequence_length, seq_max_len = trainHelper.generateData4Predict(x)
        x = np.reshape(x, (1, x.shape[0]))
        # Decode and print, following the batched pattern commented out above.
        viterbi_sequence = model.predict(x, sequence_length, seq_max_len)
        print(x_raw)
        print(labels.ID2label(viterbi_sequence[0][0]))
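
jieba.load_userdict expects one entry per line: a word, then an optional frequency and part-of-speech tag, separated by whitespace. The real data/coal_dict.txt is not shown; hypothetical entries might look like:

# Hypothetical contents for data/coal_dict.txt ("word [freq] [pos]" per line).
with open('data/coal_dict.txt', 'w', encoding='utf-8') as f:
    f.write("采煤机 10 n\n")
    f.write("掘进工作面 5 n\n")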
Example No. 7
from model import restore

restore()
Example No. 8
import model
import numpy as np
from model import SET_HYPERPARAMETER

SET_HYPERPARAMETER("contrast", 300.0)
SET_HYPERPARAMETER("learningRate", 0.0005)
SET_HYPERPARAMETER("diffLatentSpace", 5)
SET_HYPERPARAMETER("normalize", "individual")

files = np.load("./npz/diffsWithNames.npz")
goods = files["arr_0"]
bads = files["arr_1"]
data = np.concatenate([bads, goods])
model = model.emptyModel("5feb-ls5-inorm_f",
                         inputsShape=list(data.shape[1:]),
                         use="diff")

model.restore("5feb-ls5-inorm_e")
model.train(epoch=100, dataset=data)
model.save()
Example No. 9
import model
import numpy as np
from model import SET_HYPERPARAMETER

SET_HYPERPARAMETER("contrast", 300.0)
SET_HYPERPARAMETER("diffLatentSpace", 12)
SET_HYPERPARAMETER("normalize", "individual")

data = np.load("./npz/diffsWithNames.npz")
goods = data["arr_0"]
bads = data["arr_1"]
model = model.emptyModel("generateEncode",
                         use="diff",
                         log=False,
                         inputsShape=list(goods[0].shape))

model.restore("5feb-ls12-inorm_d")
testEncoded = model.encode(bads)
trainEncoded = model.encode(goods)
np.savez("./npz/codesWithNames_inorm.npz", trainEncoded, testEncoded,
         data["arr_2"], data["arr_3"])
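
With diffLatentSpace set to 12, each sample should encode to a 12-dimensional code. A one-line sanity check under that assumption:

print(trainEncoded.shape)  # expected (len(goods), 12) if encode yields one code per sample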
Example No. 10
import model
import numpy as np
from model import SET_HYPERPARAMETER

SET_HYPERPARAMETER("contrast", 50.0)
SET_HYPERPARAMETER("diffLatentSpace", 8)
data = np.load("./npz/diffsWithNames.npz")
goods = data["arr_0"]
bads = data["arr_1"]
model = model.emptyModel("generateFeatures",
                         use="diff",
                         log=False,
                         inputsShape=list(goods[0].shape))

model.restore("DIFF_23jan_ls8_e")
testFeatures = model.getFeatures(bads)
trainFeatures = model.getFeatures(goods)
np.savez("./npz/featuresWithNames_ls8.npz", trainFeatures, testFeatures,
         data["arr_2"], data["arr_3"])
Example No. 11
    # Fragment: `testdata`, `traindata`, `wikidata`, `params` and the paths are
    # defined earlier in the original function.
    testdata.read(testpath, wikidata.char_id)
    webdata = data.Dataset(params)
    webdata.read(webpath, wikidata.char_id)
    print('data loaded')

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.Session(config=config)
    model = model.Model(params)
    model.model_build(sess)
    print('model built')
    model.model_initialize(sess)
    print('model initialized')


    # model.lattice_train(wikidata, sess, 300000)
    # print('wiki_lattice finished')
    # model.train(traindata, sess, 40000)
    # print('all_train finished')
    # model.evaluate(testdata, sess)
    
    # model.train(traindata, sess, 40000)
    # print('train finished')
    # model.evaluate(testdata, sess)
    # print('lattice and text at the same time')
    model.restore('./calc_data/lattice2textlattice2divide_data/all_finished_save/model.ckpt', sess)
    wiki = [x for x in open('hoge', 'r')]
    for text in wiki:
        model.demo(text.strip(), wikidata.char_id, sess)
Example No. 12
import model
import numpy as np
from model import SET_HYPERPARAMETER

SET_HYPERPARAMETER("latentSpace", 1)

data = np.load("./npz/dataWithNames.npz")
goods = data["arr_0"]
bads = data["arr_1"]
model = model.emptyModel("generateDiff",
                         inputsShape=list(goods[0].shape),
                         log=False,
                         use="jtekt")

model.restore("4feb-ls1")
trainDiffs = model.getDiff(goods)
testDiffs = model.getDiff(bads)
np.savez("./npz/diffsWithNames.npz", trainDiffs, testDiffs, data["arr_2"],
         data["arr_3"])
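
This script produces the diffsWithNames.npz archive that Examples 5, 8, 9 and 10 consume; the arrays are positional, so the downstream scripts unpack them as:

data = np.load("./npz/diffsWithNames.npz")
goods, bads = data["arr_0"], data["arr_1"]  # trainDiffs, testDiffs
names = (data["arr_2"], data["arr_3"])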
Example No. 13
import os
import time

import tensorflow as tf
import util
import model
import ujson as json

if __name__ == "__main__":
    test_data = list()
    print('Start to process data...')
    with open('test.jsonlines', 'r') as f:
        for line in f:
            tmp_example = json.loads(line)
            test_data.append(tmp_example)
    print('Finished processing data')
    os.environ["CUDA_VISIBLE_DEVICES"] = "1"

    config = util.initialize_from_env()
    model = model.KnowledgePronounCorefModel(config)

    with tf.Session() as session:
        model.restore(session)

        # print('we are working on NP-NP')
        model.evaluate(session, test_data, official_stdout=True)
        # model.evaluate(session)
        # model.evaluate_baseline_methods(session)

print('end')
Example No. 14
# Assumes project-local `model` and `data` modules plus `parse_args`, `train`
# and `classify` helpers defined elsewhere.
import os

import numpy as np

import data
import model

def main():
    args = parse_args()

    print('Audio classifier')

    output_prefix = os.path.splitext(args.data_file)[0]

    if not os.path.exists(args.logdir):
        os.mkdir(args.logdir)

    # create / restore model
    mdl = None

    if args.model:
        print("Restoring model..")
        print("  input:", args.model)
        mdl = model.restore(args.model)
    elif model.model_exists(args.logdir) and not args.force:
        print("Restoring last created model..")
        file = model.most_recent(args.logdir)
        print("  input:", file)
        mdl = model.restore(file)

    if not args.predict:
        # training task
        print("Loading training data..")
        print("  input:", args.data_file)

        # load data from file
        X, Y = data.load_labeled_data(args.data_file)

        w, h = np.shape(X)
        print("  size: {}x{}".format(w, h))

        # encode labels
        Y, Dict = data.encode_labels(Y)

        print("  labels: ", Dict)
        # save labels with data
        # model.save_labels(Dict, args.logdir + output_prefix + "_labels.txt")

        if mdl is None:
            print("Creating new model..")
            mdl = model.create(h)

        train(mdl, (X, Y), args.logdir + output_prefix, args.kfolds,
              args.epochs)

    else:
        # prediction task
        print("Loading classification data..")
        print("  input:", args.data_file)

        X = data.load_unlabeled_data(args.data_file)

        w, h = np.shape(X)
        print("  size: {}x{}".format(w, h))

        if mdl is None:
            print("Can't classify without an existing model")
            return

        classify(mdl, X)
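
None of these examples show parse_args. A hypothetical reconstruction covering only the attributes this script reads (flag names and defaults are assumptions):

import argparse

def parse_args():
    parser = argparse.ArgumentParser(description='Audio classifier')
    parser.add_argument('--model', help='path of a saved model to restore')
    parser.add_argument('--logdir', default='logs/', help='checkpoint directory')
    parser.add_argument('--data-file', required=True, help='input data file')
    parser.add_argument('--predict', action='store_true', help='classify instead of train')
    parser.add_argument('--force', action='store_true', help='ignore previously saved models')
    parser.add_argument('--kfolds', type=int, default=5)
    parser.add_argument('--epochs', type=int, default=100)
    return parser.parse_args()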