Exemplo n.º 1
0
def predict(model, x, gpu_id=0):
    """Run batched inference over *x* and return the predicted class ids.

    Parameters
    ----------
    model : Chainer-style chain exposing a ``predictor`` callable.
    x : indexable input array (sliceable, with ``len``).
    gpu_id : device id; any value > -1 moves each batch to the GPU.

    Returns
    -------
    numpy.ndarray
        int32 class indices (argmax over the predictor output, axis 1).
    """
    chunk = 100
    n_batches = int(np.ceil(len(x) / chunk))
    labels = np.array([]).astype(np.int32)
    for b in range(n_batches):
        batch = Variable(x[b * chunk:(b + 1) * chunk])
        if gpu_id > -1:
            batch.to_gpu()
        out = model.predictor(batch)
        # Predictions are concatenated on the CPU regardless of device.
        out.to_cpu()
        labels = np.r_[labels, np.argmax(out.data, axis=1)]
    return labels
Exemplo n.º 2
0
def creation():
    """Fetch the last 100 days of prices for every ticker and run the model.

    For each symbol in the module-level ``tickers`` iterable, downloads data
    via yfinance, preprocesses it into a windowed dataset, and collects
    ``(ticker, predictions)`` pairs.

    Returns
    -------
    list of (ticker, predictions) tuples; may be partial if a download fails.
    """
    inference = []
    # Pre-bind so the except handler never hits an unbound name if the
    # failure happens before the loop starts.
    ticker = None
    try:
        today = date.today()
        window_start = today - datetime.timedelta(days=100)
        # yfinance expects ISO-formatted date strings.
        end_str = today.strftime('%Y-%m-%d')
        start_str = window_start.strftime('%Y-%m-%d')
        for ticker in tickers:
            data = yf.download(ticker, start=start_str, end=end_str)
            train = preprocess(data)
            train = windowed_dataset(train.values, 60, 32, 100)
            model = predictor('------------')
            # BUG FIX: list.append takes a single argument; the original
            # `inference.append(i, (...))` raised TypeError. Store a tuple.
            inference.append((ticker, model.predict(train, batch_size=32)))
    except Exception:
        # BUG FIX: narrowed from a bare except, and the message previously
        # read "does exist" where "does not exist" was clearly intended.
        print('The ticker {} does not exist'.format(ticker))
    finally:
        # NOTE: returning from `finally` suppresses any in-flight exception;
        # kept deliberately so callers always get the partial results.
        return inference
Exemplo n.º 3
0
def upload_file():
    """Flask view: accept one uploaded image, save it as ``test.jpg``, and
    render the template with the model's prediction output.

    Clears any leftover files in ``UPLOAD_FOLDER`` first; on POST saves the
    upload under a sanitized name, renames it to the fixed filename the
    model reads, and runs the predictor.
    """
    forward_message = ""
    uploaded_file = "No file chosen."
    # BUG FIX: build paths with os.path.join instead of hard-coded '\\'
    # separators, so the view also works on non-Windows hosts (and matches
    # the os.path.join already used for file.save below).
    for stale in glob.glob(os.path.join(UPLOAD_FOLDER, '*')):
        os.remove(stale)
    if request.method == 'POST':
        # BUG FIX: .get avoids an unhandled KeyError when the form field is
        # absent; also renamed the local so it no longer shadows builtin
        # `file`.
        upload = request.files.get('file[]')
        if upload:
            filename = secure_filename(upload.filename)
            uploaded_file = filename
            upload.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            # The model always reads a fixed filename.
            os.rename(os.path.join(UPLOAD_FOLDER, filename),
                      os.path.join(UPLOAD_FOLDER, 'test.jpg'))
            forward_message = "Prediction output: " + model.predictor()
    return render_template('app.html',
                           forward_message=forward_message,
                           uploaded_file=uploaded_file)
    # NOTE(review): incomplete fragment — the enclosing function's `def` line
    # is not visible in this chunk (an "Exemplo n.º 4" header appears to be
    # missing), so `extractor`, `source_domain`, `source_dataloader_list`,
    # `source_clf`, `target_domain`, `DATASET`, `BATCH_SIZE` and `device`
    # are all defined elsewhere. Left byte-identical.
    extractor_optim = optim.Adam(extractor.parameters(), lr=3e-4)
    # Track the number of batches in the shortest dataloader seen so far
    # (presumably used later to align per-epoch iteration counts — confirm).
    min_ = float('inf')

    for source in source_domain:
        print(source)
        dataset = DATASET(source, source + '_train.csv')
        dataset = DataLoader(dataset, batch_size=BATCH_SIZE, shuffle=True)
        if len(dataset) < min_:
            min_ = len(dataset)

        source_dataloader_list.append(dataset)

        # Two classifiers per source domain:
        # c1 : for target
        # c2 : for source
        source_clf[source] = {}
        source_clf[source]['c1'] = predictor().to(device)
        source_clf[source]['c2'] = predictor().to(device)
        #source_clf[source]['c1'].load_state_dict(torch.load('./model/2'+target_domain+'/'+source+'_c1_5.pth'))
        #source_clf[source]['c2'].load_state_dict(torch.load('./model/2'+target_domain+'/'+source+'_c2_5.pth'))
        # One optimizer jointly updates both classifiers for this source.
        source_clf[source]['optim'] = optim.Adam(
            list(source_clf[source]['c1'].parameters()) +
            list(source_clf[source]['c2'].parameters()),
            lr=3e-4)

    # Target domain gets its own dataloader and participates in the
    # shortest-loader bookkeeping as well.
    target_dataset = DATASET(target_domain, target_domain + '_train.csv')
    target_dataloader = DataLoader(target_dataset,
                                   batch_size=BATCH_SIZE,
                                   shuffle=True)
    if len(target_dataloader) < min_:
        min_ = len(target_dataloader)
Exemplo n.º 5
0
def main(src, tar):
    """Train MCD-style domain adaptation from *src* to *tar*.

    Builds the shared feature extractor ``G`` and two classifiers, selects
    the source/target digit datasets by name ('mnist', 'mnistm', 'svhn',
    'usps'), trains via ``train`` and saves accuracy/loss learning curves
    plus a t-SNE plot.

    Parameters
    ----------
    src, tar : str
        Names of the source and target datasets.

    Raises
    ------
    ValueError
        If *src* or *tar* is not a recognized dataset name.
    """
    G = feature_extractor().to(device)

    cls_c1 = predictor().to(device)
    cls_c2 = predictor().to(device)

    cls_c1.apply(weights_init_uniform)
    cls_c2.apply(weights_init_uniform)

    ###      dataloader      ###
    if src == 'mnist':
        src_train_set = dset.MNIST('./dataset/mnist', train=True,
                                   download=True,
                                   transform=gray2rgb_transform)
    elif src == 'mnistm':
        src_train_set = DATASET('./dataset/mnistm/train',
                                './dataset/mnistm/train.csv',
                                transforms=rgb_transform)
    elif src == 'svhn':
        # NOTE(review): `download=download` references a name not defined in
        # this function — presumably a module-level flag; confirm (a literal
        # True, as in the MNIST branch, looks intended). Left as-is.
        src_train_set = dset.SVHN(root='./dataset/svhn/', download=download,
                                  transform=rgb_transform)
    elif src == 'usps':
        # NOTE(review): DATASET is called with `transforms=` here but
        # `transform=` in the tar branches below — verify which keyword the
        # DATASET class actually accepts.
        src_train_set = DATASET('./dataset/usps/train',
                                './dataset/usps/train.csv',
                                transforms=gray2rgb_transform)
    else:
        # BUG FIX: an unrecognized name previously fell through all branches
        # and surfaced later as a confusing unbound-variable error.
        raise ValueError('unknown source dataset: ' + src)

    if tar == 'svhn':
        # NOTE(review): same undefined `download` as the src branch above.
        tar_train_set = dset.SVHN(root='./dataset/svhn/', download=download,
                                  transform=rgb_transform)
    elif tar == 'mnist':
        tar_train_set = dset.MNIST('./dataset/mnist', train=True,
                                   download=True,
                                   transform=gray2rgb_transform)
    elif tar == 'mnistm':
        tar_train_set = DATASET('./dataset/mnistm/train',
                                './dataset/mnistm/train.csv',
                                transform=rgb_transform)
    elif tar == 'usps':
        tar_train_set = DATASET('./dataset/usps/train',
                                './dataset/usps/train.csv',
                                transform=rgb_transform)
    else:
        raise ValueError('unknown target dataset: ' + tar)

    src_train_loader = torch.utils.data.DataLoader(
        dataset=src_train_set,
        batch_size=BATCH_SIZE,
        shuffle=True,
    )

    tar_train_loader = torch.utils.data.DataLoader(
        dataset=tar_train_set,
        batch_size=BATCH_SIZE,
        shuffle=True,
    )

    optimizer_encoder = optim.Adam(G.parameters(), lr=3e-4,
                                   weight_decay=0.0005)
    optimizer_clf_1 = optim.Adam(cls_c1.parameters(), lr=3e-4,
                                 weight_decay=0.0005)
    optimizer_clf_2 = optim.Adam(cls_c2.parameters(), lr=3e-4,
                                 weight_decay=0.0005)

    # train
    ac_list, loss_list = train(G, cls_c1, cls_c2, optimizer_encoder,
                               optimizer_clf_1, optimizer_clf_2, EP,
                               src_train_loader, tar_train_loader, src, tar)
    ac_list = np.array(ac_list).flatten()

    # plot tsne
    loss_list = np.array(loss_list).flatten()
    epoch = [i for i in range(EP)]
    my_function.tsne_plot(G, src_train_loader, tar_train_loader, src, tar,
                          BATCH_SIZE, 'mcd', mode=False)

    ### plot learning curve  ###
    # NOTE(review): 'domian_adapt' in the titles/filenames below is a typo
    # for 'domain_adapt', but the paths may be consumed elsewhere, so the
    # strings are preserved byte-for-byte.
    plt.figure()
    plt.plot(epoch, ac_list)
    plt.xlabel('EPOCH')
    plt.ylabel('Accuracy')
    plt.title('domian_adapt : ' + src + ' to ' + tar)
    plt.savefig('./learning_curve/domian_adapt_' + src + '_to_' + tar +
                '_accuracy.jpg')

    plt.figure()
    plt.plot(epoch, loss_list)
    plt.xlabel('EPOCH')
    plt.ylabel('Loss')
    plt.title('domian_adapt : ' + src + ' to ' + tar)
    plt.savefig('./learning_curve/domian_adapt_' + src + '_to_' + tar +
                '_loss.jpg')
def main(src, tar):
    """Train MCD-style domain adaptation from *src* to *tar* (DomainNet).

    Builds the shared feature extractor ``G`` and two classifiers, selects
    the source/target datasets by name ('sketch', 'infograph', 'real',
    'quickdraw'), trains via ``train`` and saves accuracy/loss learning
    curves.

    Parameters
    ----------
    src, tar : str
        Names of the source and target datasets.

    Raises
    ------
    ValueError
        If *src* or *tar* is not a recognized dataset name.
    """
    G = feature_extractor().to(device)

    cls_c1 = predictor().to(device)
    cls_c2 = predictor().to(device)

    cls_c1.apply(weights_init_uniform)
    cls_c2.apply(weights_init_uniform)

    ###      dataloader      ###
    if src == 'sketch':
        src_train_set = DATASET('sketch', 'sketch_train.csv')
    elif src == 'infograph':
        # NOTE(review): 'infograpth' looks like a typo for 'infograph', but
        # it may match an actually misspelled directory on disk — verify
        # before changing. Preserved byte-for-byte.
        src_train_set = DATASET('infograpth', 'infograph_train.csv')
    elif src == 'real':
        src_train_set = DATASET('real', 'real_train.csv')
    elif src == 'quickdraw':
        src_train_set = DATASET('quickdraw', 'quickdraw_train.csv')
    else:
        # BUG FIX: an unrecognized name previously fell through all branches
        # and surfaced later as a confusing unbound-variable error.
        raise ValueError('unknown source dataset: ' + src)

    if tar == 'sketch':
        tar_train_set = DATASET('sketch', 'sketch_train.csv')
    elif tar == 'infograph':
        # NOTE(review): same 'infograpth' spelling as above.
        tar_train_set = DATASET('infograpth', 'infograph_train.csv')
    elif tar == 'real':
        tar_train_set = DATASET('real', 'real_train.csv')
    elif tar == 'quickdraw':
        tar_train_set = DATASET('quickdraw', 'quickdraw_train.csv')
    else:
        raise ValueError('unknown target dataset: ' + tar)

    src_train_loader = torch.utils.data.DataLoader(
        dataset=src_train_set,
        batch_size=BATCH_SIZE,
        shuffle=True,
    )

    tar_train_loader = torch.utils.data.DataLoader(
        dataset=tar_train_set,
        batch_size=BATCH_SIZE,
        shuffle=True,
    )

    optimizer_encoder = optim.Adam(G.parameters(),
                                   lr=2e-4,
                                   weight_decay=0.0005)
    optimizer_clf_1 = optim.Adam(cls_c1.parameters(),
                                 lr=2e-4,
                                 weight_decay=0.0005)
    optimizer_clf_2 = optim.Adam(cls_c2.parameters(),
                                 lr=2e-4,
                                 weight_decay=0.0005)

    # train
    ac_list, loss_list = train(G, cls_c1, cls_c2, optimizer_encoder,
                               optimizer_clf_1, optimizer_clf_2, EP,
                               src_train_loader, tar_train_loader, src, tar)
    ac_list = np.array(ac_list).flatten()

    loss_list = np.array(loss_list).flatten()
    # BUG FIX: `epoch` was commented out along with the t-SNE call, but the
    # plt.plot calls below still use it, raising NameError at plot time.
    epoch = list(range(EP))
    #my_function.tsne_plot(G, src_train_loader, tar_train_loader, src, tar, BATCH_SIZE, 'mcd', mode=False)

    ### plot learning curve  ###
    # NOTE(review): 'domian_adapt' is a typo for 'domain_adapt'; the paths
    # may be consumed elsewhere, so the strings are preserved byte-for-byte.
    plt.figure()
    plt.plot(epoch, ac_list)
    plt.xlabel('EPOCH')
    plt.ylabel('Accuracy')
    plt.title('domian_adapt : ' + src + ' to ' + tar)
    plt.savefig('./learning_curve/domian_adapt_' + src + '_to_' + tar +
                '_accuracy.jpg')

    plt.figure()
    plt.plot(epoch, loss_list)
    plt.xlabel('EPOCH')
    plt.ylabel('Loss')
    plt.title('domian_adapt : ' + src + ' to ' + tar)
    plt.savefig('./learning_curve/domian_adapt_' + src + '_to_' + tar +
                '_loss.jpg')