示例#1
0
def __load_folder(folder, class_name):
    """Load all PNG images and their poses for one class folder.

    Returns a list of tuples:
        (image_tensor, class_name, pose, full_image_path)
    where image_tensor is a normalized (C, H, W) tensor and pose is a
    4-element numpy array parsed from the folder's poses.txt.
    """
    image_dict = dict()
    path = ROOT_DIR + '\\dataset\\' + folder + '\\' + class_name + '\\'

    # Index every PNG in the folder by its bare file name (path stripped).
    # Images are normalized, converted to PyTorch tensors, then permuted
    # from (H, W, C) to (C, H, W).
    for f in glob.iglob(path + '*'):
        # Raw string for the regex (the original '.*\.png$' relied on
        # Python passing the unknown escape through unchanged).
        if re.match(r'.*\.png$', f):
            image_dict[f[len(path):]] = torch.from_numpy(
                normalize(__load_image(f))).permute((2, 0, 1))

    out = list()

    for f in glob.iglob(path + '*'):
        # poses.txt alternates lines: even lines name an image, odd lines
        # hold four whitespace-separated pose components.
        if re.match(r'.*poses\.txt$', f):
            # BUG FIX: the original leaked the file handle via
            # open(f, 'r').readlines(); use a context manager instead.
            with open(f, 'r') as pose_file:
                lines = pose_file.readlines()

            for i in range(len(lines) // 2):
                pose_strings = lines[i * 2 + 1].split()
                pose = np.array(
                    (float(pose_strings[0]), float(pose_strings[1]),
                     float(pose_strings[2]), float(pose_strings[3])))

                # lines[i*2][2:-1] strips a 2-char prefix and the trailing
                # newline to recover the image file name — TODO confirm the
                # prefix format against an actual poses.txt.
                out.append((image_dict[lines[i * 2][2:-1]], class_name, pose,
                            path + lines[i * 2][2:-1]))

    return out
示例#2
0
def prepare_data(names, feat_dict):
    """Assemble (X, y) training arrays from named feature vectors.

    X: features looked up in *feat_dict*, normalized, with a trailing
       channel axis added.
    y: one-hot class labels derived from each sample's parsed name
       (1-based label converted to 0-based index).
    """
    features = [feat_dict[name_i] for name_i in names]
    X = np.expand_dims(norm.normalize(np.array(features), 'all'), axis=-1)
    labels = [data.parse_name(name_i)[0] - 1 for name_i in names]
    y = keras.utils.to_categorical(labels)
    return X, y
示例#3
0
def infer():
    """HTTP endpoint: normalize the 'sample' field of the JSON request body."""
    payload = request.json
    text = payload['sample']
    normalized = normalize(text)

    # Log both sides of the transformation for debugging.
    print('INPUT: ', text)
    print('RESULT: ', normalized)

    return send_response({'result': normalized})
示例#4
0
def model(data, n_rules):
    """Build an ANFIS model for the breast-cancer dataset.

    Cluster centres found by fuzzy c-means on the normalized inputs are
    used to place one Gaussian membership function per rule on each input
    variable.
    """
    # Pull the tensors out of the dataset and normalize the inputs.
    x, y = data.dataset.tensors
    y = y.float()
    x = x.numpy()
    x, minimum, maximum = normalize(data=x)
    x = x.astype(float)

    # Fuzzy c-means: the number of inputs is constant, so the number of
    # rules = number of membership functions = number of centres.
    modelo = cmenas(k=n_rules)
    modelo.train(data=x, MAX=15, tol=1e-2)
    centros = modelo.C

    # Map the centres back to the original feature scale.
    centros = denormalize(data=centros, m=minimum, M=maximum)

    # BUG FIX: the original list was missing the comma after
    # 'concavity_mean', so implicit string concatenation fused it with
    # 'conc_mean' into a single bogus name and dropped one input variable.
    names = [
        'radius_mean', 'texture_mean', 'perimeter_mean', 'area_mean',
        'smoothness_mean', 'compactness_mean', 'concavity_mean',
        'conc_mean', 'points_mean', 'symmetry_mean'
    ]

    def mk_var(name, centros, i):
        # One Gaussian MF (sigma=3) per rule, centred on that rule's centre
        # for input column i.
        return (name, make_gauss_mfs(3,
                                     [centros[n, i] for n in range(n_rules)]))

    invardefs = [mk_var(name, centros, i) for i, name in enumerate(names)]

    outvars = ['diagnosis']

    model = anfis.AnfisNet('breast-cancer', invardefs, outvars)
    return model
示例#5
0
File: iris.py  Project: gtLara/anfis
def model(data, n_rules):
    """Build an ANFIS model for the Iris dataset.

    Cluster centres found by fuzzy c-means on the normalized inputs are
    used to place one Gaussian membership function per rule on each of the
    four input variables.
    """
    # Pull the tensors out of the dataset and normalize the inputs.
    x, y = data.dataset.tensors
    x = x.numpy()
    x, minimum, maximum = normalize(data=x)

    # Fuzzy c-means: the number of inputs is constant, so the number of
    # rules = number of membership functions = number of centres.
    modelo = cmenas(k=n_rules)
    modelo.train(data=x, MAX=15, tol=1e-2)
    centros = modelo.C

    # Map the centres back to the original feature scale.
    centros = denormalize(data=centros, m=minimum, M=maximum)

    # BUG FIX: the original defined mk_var twice; the first, hard-coded
    # three-centre version was dead code shadowed by this general one and
    # has been removed.
    def mk_var(name, centros, i):  # from iris_example
        # One Gaussian MF (sigma=1) per rule, centred on that rule's centre
        # for input column i.
        return (name, make_gauss_mfs(1,
                                     [centros[n, i] for n in range(n_rules)]))

    invardefs = [
        mk_var(name, centros, i) for i, name in enumerate(
            ['SepalLengthCm', 'SepalWidthCm', 'PetalLengthCm', 'PetalWidthCm'])
    ]

    outvars = ['Species']

    model = anfis.AnfisNet('iris', invardefs, outvars)
    return model
示例#6
0
def normalize_multi(pair, values):
    """Run normalize(pair, value) for every value in *values*.

    Called for its side effects only; returns None.
    """
    for value in values:
        normalize(pair, value)
示例#7
0
from csvread import readcsv
from neuron3to4lyr import NN
from norm import normalize

# Read the monsoon-rainfall (ISMR) and Nino3 index series from CSV and
# normalize them for neural-network training.
# NOTE(review): Python 2 script — uses xrange.
ismr = readcsv('ismr.csv')
nino3 = readcsv('nino3.csv')
ismr_norm = normalize(ismr)
nino3_norm = normalize(nino3)

no_of_months = len(ismr)

# Pre-allocate [input, target] placeholder pairs: the full series, a
# training set (all but the last 500 months) and a 500-month test set.
ismr_nino3 = [[[None], [None]] for x in xrange(no_of_months)]
ismr_nino3_train = [[[None], [None]] for x in xrange(no_of_months - 500)]
ismr_nino3_test = [[[None], [None]] for x in xrange(500)]

# NOTE(review): everything below — filling the pairs, slicing the
# train/test split, and training/testing the NN — is commented out, so the
# script currently only loads and normalizes the data.
#for i in range(no_of_months):
#	ismr_nino3[i][0][0] = ismr[i]
#	ismr_nino3[i][1][0] = nino3[i]
#
#ismr_nino3_train = ismr_nino3[0:no_of_months-500-1]
#ismr_nino3_test  = ismr_nino3[no_of_months-500:no_of_months-1]
## create a network with two input, two hidden, and one output nodes
#n = NN(1, 4, 2 , 1)
## train it with some patterns
#n.train(ismr_nino3_train)
## save a network
## test it
#n.test(ismr_nino3_test)
#
#
示例#8
0
def infer():
    """HTTP endpoint: normalize the 'sample' field of the JSON request body."""
    payload = request.json
    text = payload['sample']
    normalized = normalize(text)

    # Log both sides of the transformation for debugging.
    print('INPUT: ', text)
    print('RESULT: ', normalized)

    return send_response({'result': normalized})


if __name__ == '__main__':
    config = configparser.ConfigParser()
    config.read("conf/config.cfg")

    # Load the abbreviation dictionary; the OOV dictionary is optional,
    # so fall back to an empty mapping when it cannot be read.
    abbre_dict = utils.read_txt_two_cols(config['resources']['abbre_path'])
    try:
        oov_dict = utils.read_oov(config['resources']['oov_path'])
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit. Keep the best-effort fallback,
        # but only for ordinary errors.
        oov_dict = {}
    # init api
    # app.debug = True
    # host = os.environ.get('IP', '0.0.0.0')
    # port = int(os.environ.get('PORT', 11993))
    # app.run(host=host, port=port, threaded=True, use_reloader=False)
    # app.run()

    # Interactive loop: normalize whatever the user types in.
    while True:
        inp = input('nhap input vao day: ')
        result = normalize(inp)
        print('result: ', result)
示例#9
0
 def test_fromAndTos(self):
     """normalize(5, 20) should map to ~5.01155245 (to 7 decimal places)."""
     result = normalize(5, 20)
     self.assertAlmostEqual(result, 5.011552452941506)