Code example #1
def main():
    tool = DataTool("./Material Glaucoma/refuge_images")
    tool.loadData(r'[ng]\d{4}[^_]')  # image files named like n0012.png / g0406.png, skipping names with an underscore after the digits

    """
    handmade_p_tile_method, sobel_watershed_method,  otsu_local_method, funcionan decente con disc
    """

    if False:  # always skipped: flip to True to run the metrics sweep instead of the single-image demo below
        met_list = []
        #ch.handmade_p_tile_method, ch.sobel_watershed_method, ch.otsu_local_method
        for alg in [ch.sobel_watershed_method]:
            masks = l.run_all(tool, alg, op="cup")
            ss = (alg, l.metrics(masks, l.sensitivity), l.metrics(masks, l.specificity))
            jd = (alg, l.metrics(masks, l.DICE), l.metrics(masks, l.jaccard))
            met_list.append(ss)
            met_list.append(jd)

        vi.plot_scatter(met_list,3,2,"metrics")
        vi.show()

    else:
        # tool_entry = tool.data["g0406.png"]
        tool_entry = tool.data["n0012.png"]
        # tool_entry = tool.data["n0012.png"]
        # tool_entry = tool.data["n0005.png"]
        # tool_entry = tool.data["n0013.png"]

        
        img = tool_entry["img"]
        (_, cut, _) = ch.sobel_watershed_method(img, test=True)
        # ch.snakes(cut, op="cup", test=True)
        vi.show()
Code example #2
import numpy as np
import skimage.color
import skimage.filters
from sklearn.cluster import KMeans

import visualize as vi  # assumed: the project's plotting helpers (pltImage, show)


def segments(image, k, i_sigma=2.2):
    # Grayscale + Gaussian blur, then flatten to one intensity feature per pixel.
    blur = skimage.color.rgb2gray(image)
    blur = skimage.filters.gaussian(blur, sigma=i_sigma)

    image_gray = blur.reshape(blur.shape[0] * blur.shape[1], 1)

    kmeans = KMeans(n_clusters=k, random_state=0).fit(image_gray)

    clustered = kmeans.cluster_centers_[kmeans.labels_]
    labels = kmeans.labels_

    # For each cluster, keep its pixels at their centroid intensity and set
    # every other pixel to white (1.0).
    for cluster_idx in range(k):
        image_cluster = []
        for px in range(len(labels)):
            if labels[px] == cluster_idx:
                image_cluster.append(float(clustered[px]))
            else:
                image_cluster.append(1.0)

    # Only the last cluster's image survives the loop above.
    reshape_clustered = np.array(image_cluster).reshape(blur.shape)

    vi.pltImage(reshape_clustered)
    vi.show()
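
A minimal call to exercise segments() above (a sketch only: the sample image comes from scikit-image, and the project's vi/visualize plotting module is assumed to be importable):

import skimage.data

# Illustrative usage: cluster the bundled astronaut image into 3 intensity
# groups and display the last cluster's pixels.
segments(skimage.data.astronaut(), k=3)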
Code example #3
def plot_jornet_joints_global_depth(joints_global_depth, filenamebase,
                                    gt_joints=None, color_jornet_joints='C6'):
    if gt_joints is None:
        visualize.plot_3D_joints(joints_global_depth)
    else:
        fig, ax = visualize.plot_3D_joints(gt_joints)
        visualize.plot_3D_joints(joints_global_depth, fig=fig, ax=ax, color=color_jornet_joints)
    visualize.title('JORNet (GT multi-coloured; JORNet single color): ' + filenamebase)
    visualize.show()
    return joints_global_depth
Code example #4
def plot_halnet_joints_from_heatmaps_crop(halnet_main_out, img_numpy, filenamebase, plot=True):
    labels_colorspace = conv.heatmaps_to_joints_colorspace(halnet_main_out)
    data_crop, crop_coords, labels_heatmaps, labels_colorspace = \
        converter.crop_image_get_labels(img_numpy, labels_colorspace, range(21))
    if plot:
        fig = visualize.create_fig()
        visualize.plot_image(data_crop, title=filenamebase, fig=fig)
        visualize.plot_joints_from_colorspace(labels_colorspace, title=filenamebase, fig=fig, data=data_crop)
        visualize.title('HALNet (joints from heatmaps - cropped): ' + filenamebase)
        visualize.show()
    return data_crop
Code example #5
def firstExperiment(st):
    from visualize import getWave_pivot_jump as gpj, getMax

    for i in range(2):
        print('recording...')
        fr = record(st)
        print(fr)

        mx, idx = getMax(fr)
        trim, L = gpj(fr, pivot=idx, outPick=True)
        sd.play(trim)
        vz.plot(fr)
        vz.plot(trim, L)
        vz.show()
Code example #6
File: oppg2.py  Project: NilsBarlaug/TDT4137
def main():

    hidden_size = 3

    dataset = SupervisedDataSet(1, 1)
    for i in range(1, 9):
        dataset.addSample(i, i)

    net = buildNetwork(1, hidden_size, 1, hiddenclass=TanhLayer)

    trainer = BackpropTrainer(net, dataset)

    trainer.trainUntilConvergence(verbose=False, validationProportion=0.15,
                                  maxEpochs=1000, continueEpochs=10)

    print(latex.format(net))
    visualize.show(net)
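
As a follow-up sketch (not part of the original snippet), the trained PyBrain network can be queried with net.activate to check how well it reproduces the identity mapping:

# Print the network output for each training input; activate() is PyBrain's
# standard feed-forward call and returns a one-element array here.
for i in range(1, 9):
    print(i, net.activate([i]))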
Code example #7
                    help='Total number of examples to check')
parser.add_argument('-r',
                    dest='root_folder',
                    default='',
                    required=True,
                    help='Root folder for dataset')
args = parser.parse_args()

train_loader = synthhands_handler.get_SynthHands_trainloader(
    root_folder=args.root_folder,
    joint_ixs=range(21),
    heatmap_res=(320, 240),
    batch_size=1,
    verbose=True)

print("Checking " + str(NUM_EXAMPLES) + " examples from the training set")
for batch_idx, (data, target) in enumerate(train_loader):
    filenamebase = train_loader.dataset.get_filenamebase(batch_idx)
    target_heatmaps, target_joints, target_roothand = target
    visualize.plot_image_and_heatmap(target_heatmaps[0][4].cpu().data.numpy(),
                                     data=data[0].cpu().data.numpy(),
                                     title='Training set\n' + filenamebase +
                                     '\nImage + Heatmap(thumb tip)')
    visualize.show()
    visualize.plot_joints_from_heatmaps(target_heatmaps[0].data.cpu().numpy(),
                                        title='Joints: ' + filenamebase,
                                        data=data[0].data.cpu().numpy())
    visualize.show()
    if (batch_idx + 1) == args.num_examples:
        break
Code example #8
import folium
import visualize as vs

# Query the Data and grab data
# put in 2d array [zip][essential]
# call function like below

nyMap = folium.Map(location=[40.7128, -74.0060],
                   tiles='Stamen Toner',
                   zoom_start=11)
dataset1, dataset2 = None, None
vs.choropleth(nyMap, 'Layer1', "Test1", dataset1)
vs.choropleth(nyMap, 'Layer2', "Test2", dataset2)
vs.show(nyMap)
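
A hypothetical sketch of how dataset1 and dataset2 could be filled before the choropleth calls, following the "[zip][essential]" 2-D layout described in the comments; the CSV file name and column names below are assumptions:

import pandas as pd

essentials = pd.read_csv("essential_by_zip.csv")          # hypothetical input file
dataset1 = essentials[["zip", "essential"]].values        # rows of [zip, essential count]
dataset2 = essentials[["zip", "non_essential"]].values    # hypothetical second layer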
Code example #9
def validate(valid_loader, model, optimizer, valid_vars, control_vars, verbose=True):
    curr_epoch_iter = 1
    for batch_idx, (data, target) in enumerate(valid_loader):
        control_vars['batch_idx'] = batch_idx
        if batch_idx < control_vars['iter_size']:
            print_verbose("\rPerforming first iteration; current mini-batch: " +
                          str(batch_idx + 1) + "/" + str(control_vars['iter_size']), verbose, n_tabs=0, erase_line=True)
        # start time counter
        start = time.time()
        # get data and target as CUDA variables
        target_heatmaps, target_joints, target_joints_z = target
        data, target_heatmaps = Variable(data), Variable(target_heatmaps)
        if valid_vars['use_cuda']:
            data = data.cuda()
            target_heatmaps = target_heatmaps.cuda()
        # visualize if debugging
        # get model output
        output = model(data)
        # accumulate loss for sub-mini-batch
        if valid_vars['cross_entropy']:
            loss_func = my_losses.cross_entropy_loss_p_logq
        else:
            loss_func = my_losses.euclidean_loss
        loss = my_losses.calculate_loss_HALNet(loss_func,
            output, target_heatmaps, model.joint_ixs, model.WEIGHT_LOSS_INTERMED1,
            model.WEIGHT_LOSS_INTERMED2, model.WEIGHT_LOSS_INTERMED3,
            model.WEIGHT_LOSS_MAIN, control_vars['iter_size'])

        if DEBUG_VISUALLY:
            for i in range(control_vars['max_mem_batch']):
                filenamebase_idx = (batch_idx * control_vars['max_mem_batch']) + i
                filenamebase = valid_loader.dataset.get_filenamebase(filenamebase_idx)
                fig = visualize.create_fig()
                #visualize.plot_joints_from_heatmaps(output[3][i].data.numpy(), fig=fig,
                #                                    title=filenamebase, data=data[i].data.numpy())
                #visualize.plot_image_and_heatmap(output[3][i][8].data.numpy(),
                #                                 data=data[i].data.numpy(),
                #                                 title=filenamebase)
                #visualize.savefig('/home/paulo/' + filenamebase.replace('/', '_') + '_heatmap')

                labels_colorspace = conv.heatmaps_to_joints_colorspace(output[3][i].data.numpy())
                data_crop, crop_coords, labels_heatmaps, labels_colorspace = \
                    converter.crop_image_get_labels(data[i].data.numpy(), labels_colorspace, range(21))
                visualize.plot_image(data_crop, title=filenamebase, fig=fig)
                visualize.plot_joints_from_colorspace(labels_colorspace, title=filenamebase, fig=fig, data=data_crop)
                #visualize.savefig('/home/paulo/' + filenamebase.replace('/', '_') + '_crop')
                visualize.show()

        #loss.backward()
        valid_vars['total_loss'] += loss
        # accumulate pixel dist loss for sub-mini-batch
        valid_vars['total_pixel_loss'] = my_losses.accumulate_pixel_dist_loss_multiple(
            valid_vars['total_pixel_loss'], output[3], target_heatmaps, control_vars['batch_size'])
        if valid_vars['cross_entropy']:
            valid_vars['total_pixel_loss_sample'] = my_losses.accumulate_pixel_dist_loss_from_sample_multiple(
                valid_vars['total_pixel_loss_sample'], output[3], target_heatmaps, control_vars['batch_size'])
        else:
            valid_vars['total_pixel_loss_sample'] = [-1] * len(model.joint_ixs)
        # get boolean variable stating whether a mini-batch has been completed
        minibatch_completed = (batch_idx+1) % control_vars['iter_size'] == 0
        if minibatch_completed:
            # append total loss
            valid_vars['losses'].append(valid_vars['total_loss'].item())
            # erase total loss
            total_loss = valid_vars['total_loss'].item()
            valid_vars['total_loss'] = 0
            # append dist loss
            valid_vars['pixel_losses'].append(valid_vars['total_pixel_loss'])
            # erase pixel dist loss
            valid_vars['total_pixel_loss'] = [0] * len(model.joint_ixs)
            # append dist loss of sample from output
            valid_vars['pixel_losses_sample'].append(valid_vars['total_pixel_loss_sample'])
            # erase dist loss of sample from output
            valid_vars['total_pixel_loss_sample'] = [0] * len(model.joint_ixs)
            # check if loss is better
            if valid_vars['losses'][-1] < valid_vars['best_loss']:
                valid_vars['best_loss'] = valid_vars['losses'][-1]
                #print_verbose("  This is a best loss found so far: " + str(valid_vars['losses'][-1]), verbose)
            # log checkpoint
            if control_vars['curr_iter'] % control_vars['log_interval'] == 0:
                trainer.print_log_info(model, optimizer, 1, total_loss, valid_vars, control_vars)
                model_dict = {
                    'model_state_dict': model.state_dict(),
                    'optimizer_state_dict': optimizer.state_dict(),
                    'control_vars': control_vars,
                    'train_vars': valid_vars,
                }
                trainer.save_checkpoint(model_dict,
                                        filename=valid_vars['checkpoint_filenamebase'] +
                                                 str(control_vars['num_iter']) + '.pth.tar')
            # print time lapse
            prefix = 'Validating (Epoch #' + str(1) + ' ' + str(control_vars['curr_epoch_iter']) + '/' +\
                     str(control_vars['tot_iter']) + ')' + ', (Batch ' + str(control_vars['batch_idx']+1) +\
                     '(' + str(control_vars['iter_size']) + ')' + '/' +\
                     str(control_vars['num_batches']) + ')' + ', (Iter #' + str(control_vars['curr_iter']) +\
                     '(' + str(control_vars['batch_size']) + ')' +\
                     ' - log every ' + str(control_vars['log_interval']) + ' iter): '
            control_vars['tot_toc'] = display_est_time_loop(control_vars['tot_toc'] + time.time() - start,
                                                            control_vars['curr_iter'], control_vars['num_iter'],
                                                            prefix=prefix)

            control_vars['curr_iter'] += 1
            control_vars['start_iter'] = control_vars['curr_iter'] + 1
            control_vars['curr_epoch_iter'] += 1


    return valid_vars, control_vars
Code example #10
from recorder import record
from numpy import savetxt as save
from numpy import array
import os
import csv

if not os.path.isfile('test.txt'):
    data = record('Start speaking ...')
    data = data.reshape(44100)  # flatten to a 1-D array of 44100 samples
    save('test.txt', data, delimiter=' ')
else:
    tmp = csv.reader(open('test.txt', 'r'), delimiter=' ')
    data = []
    for i in tmp:
        data.append(float(i[0]))
    data = array(data)
print(data)

import python_speech_features as psf

# nfft=1103 covers one default 25 ms analysis window at 44.1 kHz (0.025 * 44100 = 1102.5 samples)
mfcc = psf.mfcc(data, 44100, nfft=1103)
save('testMfcc.txt', mfcc, delimiter=' ')
print(mfcc.shape)
import visualize as vz

vz.plot(mfcc)
vz.show()
Code example #11
def validate(valid_loader,
             model,
             optimizer,
             valid_vars,
             control_vars,
             verbose=True):
    curr_epoch_iter = 1
    for batch_idx, (data, target) in enumerate(valid_loader):
        control_vars['batch_idx'] = batch_idx
        if batch_idx < control_vars['iter_size']:
            print_verbose(
                "\rPerforming first iteration; current mini-batch: " +
                str(batch_idx + 1) + "/" + str(control_vars['iter_size']),
                verbose,
                n_tabs=0,
                erase_line=True)
        # start time counter
        start = time.time()
        # get data and target as CUDA variables
        target_heatmaps, target_joints, target_handroot = target
        # make target joints be relative
        target_joints = target_joints[:, 3:]
        data, target_heatmaps = Variable(data), Variable(target_heatmaps)
        if valid_vars['use_cuda']:
            data = data.cuda()
            target_joints = target_joints.cuda()
            target_heatmaps = target_heatmaps.cuda()
            target_handroot = target_handroot.cuda()
        # visualize if debugging
        # get model output
        output = model(data)
        # accumulate loss for sub-mini-batch
        if model.cross_entropy:
            loss_func = my_losses.cross_entropy_loss_p_logq
        else:
            loss_func = my_losses.euclidean_loss
        weights_heatmaps_loss, weights_joints_loss = get_loss_weights(
            control_vars['curr_iter'])
        loss, loss_heatmaps, loss_joints = my_losses.calculate_loss_JORNet(
            loss_func, output, target_heatmaps, target_joints,
            valid_vars['joint_ixs'], weights_heatmaps_loss,
            weights_joints_loss, control_vars['iter_size'])
        valid_vars['total_loss'] += loss
        valid_vars['total_joints_loss'] += loss_joints
        valid_vars['total_heatmaps_loss'] += loss_heatmaps
        # accumulate pixel dist loss for sub-mini-batch
        valid_vars[
            'total_pixel_loss'] = my_losses.accumulate_pixel_dist_loss_multiple(
                valid_vars['total_pixel_loss'], output[3], target_heatmaps,
                control_vars['batch_size'])
        valid_vars[
            'total_pixel_loss_sample'] = my_losses.accumulate_pixel_dist_loss_from_sample_multiple(
                valid_vars['total_pixel_loss_sample'], output[3],
                target_heatmaps, control_vars['batch_size'])
        # get boolean variable stating whether a mini-batch has been completed

        for i in range(control_vars['max_mem_batch']):
            filenamebase_idx = (batch_idx * control_vars['max_mem_batch']) + i
            filenamebase = valid_loader.dataset.get_filenamebase(
                filenamebase_idx)

            print('')
            print(filenamebase)

            visualize.plot_image(data[i].data.numpy())
            visualize.show()

            output_batch_numpy = output[7][i].data.cpu().numpy()
            print('\n-------------------------------')
            reshaped_out = output_batch_numpy.reshape((20, 3))
            for j in range(20):
                print('[{}, {}, {}],'.format(reshaped_out[j, 0],
                                             reshaped_out[j, 1],
                                             reshaped_out[j, 2]))
            print('-------------------------------')
            fig, ax = visualize.plot_3D_joints(target_joints[i])
            visualize.plot_3D_joints(output_batch_numpy,
                                     fig=fig,
                                     ax=ax,
                                     color='C6')

            visualize.title(filenamebase)
            visualize.show()

            temp = np.zeros((21, 3))
            output_batch_numpy_abs = output_batch_numpy.reshape((20, 3))
            temp[1:, :] = output_batch_numpy_abs
            output_batch_numpy_abs = temp
            output_joints_colorspace = camera.joints_depth2color(
                output_batch_numpy_abs,
                depth_intr_matrix=synthhands_handler.DEPTH_INTR_MTX,
                handroot=target_handroot[i].data.cpu().numpy())
            visualize.plot_3D_joints(output_joints_colorspace)
            visualize.show()
            aa1 = target_joints[i].data.cpu().numpy().reshape((20, 3))
            aa2 = output[7][i].data.cpu().numpy().reshape((20, 3))
            print('\n----------------------------------')
            print(np.sum(np.abs(aa1 - aa2)) / 60)
            print('----------------------------------')

        #loss.backward()
        valid_vars['total_loss'] += loss
        valid_vars['total_joints_loss'] += loss_joints
        valid_vars['total_heatmaps_loss'] += loss_heatmaps
        # accumulate pixel dist loss for sub-mini-batch
        valid_vars[
            'total_pixel_loss'] = my_losses.accumulate_pixel_dist_loss_multiple(
                valid_vars['total_pixel_loss'], output[3], target_heatmaps,
                control_vars['batch_size'])
        valid_vars[
            'total_pixel_loss_sample'] = my_losses.accumulate_pixel_dist_loss_from_sample_multiple(
                valid_vars['total_pixel_loss_sample'], output[3],
                target_heatmaps, control_vars['batch_size'])
        # get boolean variable stating whether a mini-batch has been completed
        minibatch_completed = (batch_idx + 1) % control_vars['iter_size'] == 0
        if minibatch_completed:
            # append total loss
            valid_vars['losses'].append(valid_vars['total_loss'].data[0])
            # erase total loss
            total_loss = valid_vars['total_loss'].data[0]
            valid_vars['total_loss'] = 0
            # append total joints loss
            valid_vars['losses_joints'].append(
                valid_vars['total_joints_loss'].data[0])
            # erase total joints loss
            valid_vars['total_joints_loss'] = 0
            # append total joints loss
            valid_vars['losses_heatmaps'].append(
                valid_vars['total_heatmaps_loss'].data[0])
            # erase total joints loss
            valid_vars['total_heatmaps_loss'] = 0
            # append dist loss
            valid_vars['pixel_losses'].append(valid_vars['total_pixel_loss'])
            # erase pixel dist loss
            valid_vars['total_pixel_loss'] = [0] * len(model.joint_ixs)
            # append dist loss of sample from output
            valid_vars['pixel_losses_sample'].append(
                valid_vars['total_pixel_loss_sample'])
            # erase dist loss of sample from output
            valid_vars['total_pixel_loss_sample'] = [0] * len(model.joint_ixs)
            # check if loss is better
            #if valid_vars['losses'][-1] < valid_vars['best_loss']:
            #    valid_vars['best_loss'] = valid_vars['losses'][-1]
            #    print_verbose("  This is a best loss found so far: " + str(valid_vars['losses'][-1]), verbose)
            # log checkpoint
            if control_vars['curr_iter'] % control_vars['log_interval'] == 0:
                trainer.print_log_info(model, optimizer, 1, total_loss,
                                       valid_vars, control_vars)
                model_dict = {
                    'model_state_dict': model.state_dict(),
                    'optimizer_state_dict': optimizer.state_dict(),
                    'control_vars': control_vars,
                    'train_vars': valid_vars,
                }
                trainer.save_checkpoint(
                    model_dict,
                    filename=valid_vars['checkpoint_filenamebase'] +
                    str(control_vars['num_iter']) + '.pth.tar')
            # print time lapse
            prefix = 'Validating (Epoch #' + str(1) + ' ' + str(control_vars['curr_epoch_iter']) + '/' +\
                     str(control_vars['tot_iter']) + ')' + ', (Batch ' + str(control_vars['batch_idx']+1) +\
                     '(' + str(control_vars['iter_size']) + ')' + '/' +\
                     str(control_vars['num_batches']) + ')' + ', (Iter #' + str(control_vars['curr_iter']) +\
                     '(' + str(control_vars['batch_size']) + ')' +\
                     ' - log every ' + str(control_vars['log_interval']) + ' iter): '
            control_vars['tot_toc'] = display_est_time_loop(
                control_vars['tot_toc'] + time.time() - start,
                control_vars['curr_iter'],
                control_vars['num_iter'],
                prefix=prefix)

            control_vars['curr_iter'] += 1
            control_vars['start_iter'] = control_vars['curr_iter'] + 1
            control_vars['curr_epoch_iter'] += 1

    return valid_vars, control_vars
Code example #12
def secondOne(st):
    wrd = nextWord(st, hearable=280, chunk=1024)
    # print (wrd)
    sd.play(wrd)
    vz.plot(wrd)
    vz.show()
Code example #13
mesh = None 

with open("test/test.stl","rb") as f:
    gen = stl.read_stl(f)
    next(gen) # ignore the length
    mesh = convert_mesh.triangles_to_mesh(map(stl.points, gen))

zbuf = convex_roughing.construct_intervals(mesh,np.array([0,0,1]))
hull = convex_roughing.hull_on_plane(mesh, zbuf, z, np.array([0,0,1]))


points = list(hull.points[i] for i in hull.vertices)
points.append(points[0])

points.reverse()
polygon = shape.LinearRing(points)


region = shape.LinearRing([(-10,0),(100,0),(100,100),(-10,100)])

c = cell.Cell(region, region, polygon, cell.offsetting)

c.construct()

visualize.show_paths(to_visual(c.emit()))
visualize.show_paths(to_visual([shape.LinearRing([(0,0),(100,0),(100,100),(0,100)])]))

visualize.show_mesh(*mesh)
visualize.show()

Code example #14
import pandas
from k_means_clustering import K_Means_Clustering
from visualize import plot_clusters_2d, show

data_frame = pandas.read_csv("iris.csv", sep=";")
df = data_frame.drop('Species', axis=1)

for column in df.columns.to_list():
    df[column] = df[column].str.replace(",", ".").astype(float)

k_means_wrapper = K_Means_Clustering(df, 2)
centroids, clusters = k_means_wrapper.k_means_algorithm()

sepal_centroids = centroids[["Sepal.Length","Sepal.Width"]]

petal_centroids = centroids[["Petal.Length","Petal.Width"]]

plot_clusters_2d(clusters, sepal_centroids, "Sepal.Length", "Sepal.Width")
plot_clusters_2d(clusters, petal_centroids, "Petal.Length", "Petal.Width")

show()
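
For reference, the comma-to-dot conversion done in the loop above can also be handled at load time with pandas' decimal argument (a sketch against the same semicolon-separated file):

data_frame = pandas.read_csv("iris.csv", sep=";", decimal=",")
df = data_frame.drop('Species', axis=1)  # numeric columns already parsed as floats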
Code example #15
    # leaf = 0.05
    # downsampled_intf = clouds.uniform_voxelgrid_sample(intf, voxel_size=leaf * 0.5)
    # normal_pts, normals = clouds.robust_normals(downsampled_intf, leaf_size=leaf)

    vertices, normals = test_half_mesh(mesh)
    normal_pts = vertices

    # Build spinimages
    tic = time.time()
    spin_images = build_spinimages(vertices, normal_pts, normals, scale=1.0)
    toc = time.time() - tic
    print('Spin image library construction took {} seconds'.format(toc))

    # Choose a test image
    # test_spin = spin(intf, normal_pts[0], normals[0])
    for k in range(20):
        n = np.random.randint(len(spin_images))
        test_spin = spin_images[n]

        tic = time.time()
        similarity = np.array([rpq(test_spin, q) for q in spin_images])
        toc = time.time() - tic
        print('Similarity estimation took {} seconds'.format(toc))
        print(np.min(similarity), np.max(similarity))
        similarity = np.clip(similarity, 0.0, 1.0)

        visualize.color_points3d(normal_pts, similarity, scale_factor=0.1)
        visualize.points3d(normal_pts[n], scale_factor=0.05, opacity=0.7)

        visualize.show(axis_scale=0.2)
Code example #16
def plot_halnet_joints_from_heatmaps(halnet_main_out, img_numpy, filenamebase):
    fig = visualize.create_fig()
    visualize.plot_joints_from_heatmaps(halnet_main_out, fig=fig, data=img_numpy)
    visualize.title('HALNet (joints from heatmaps): ' + filenamebase)
    visualize.show()
Code example #17
def plot_halnet_heatmap(halnet_mainout, img_numpy, heatmap_ix, filenamebase):
    visualize.plot_image_and_heatmap(halnet_mainout[heatmap_ix], data=img_numpy)
    joint_name = synthhands_handler.get_joint_name_from_ix(heatmap_ix)
    visualize.title('HALNet (heatmap for ' + joint_name + '): ' + filenamebase)
    visualize.show()
Code example #18
    def test(self, error_func_list=None, is_visualize=False):
        from demorender import demoAll
        total_task = len(self.test_data)
        print('total img:', total_task)

        model = self.net.model
        total_error_list = []
        num_output = self.mode[3]
        num_input = self.mode[4]
        data_generator = DataGenerator(all_image_data=self.test_data,
                                       mode=self.mode[2],
                                       is_aug=False,
                                       is_pre_read=self.is_pre_read)

        with torch.no_grad():
            model.eval()
            for i in range(len(self.test_data)):
                data = data_generator.__getitem__(i)
                x = data[0]
                x = x.to(self.net.device).float()
                y = [data[j] for j in range(1, 1 + num_input)]
                for j in range(num_input):
                    y[j] = y[j].to(x.device).float()
                    y[j] = torch.unsqueeze(y[j], 0)
                x = torch.unsqueeze(x, 0)
                outputs = model(x, *y)

                p = outputs[-1]
                x = x.squeeze().cpu().numpy().transpose(1, 2, 0)
                p = p.squeeze().cpu().numpy().transpose(1, 2, 0) * 280
                b = sio.loadmat(self.test_data[i].bbox_info_path)
                gt_y = y[0]
                gt_y = gt_y.squeeze().cpu().numpy().transpose(1, 2, 0) * 280

                temp_errors = []
                for error_func_name in error_func_list:
                    error_func = getErrorFunction(error_func_name)
                    error = error_func(gt_y, p, b['Bbox'], b['Kpt'])
                    temp_errors.append(error)
                total_error_list.append(temp_errors)
                print(self.test_data[i].init_image_path, end='  ')
                for er in temp_errors:
                    print('%.5f' % er, end=' ')
                print('')
                if is_visualize:

                    if temp_errors[0] > 0.00:
                        tex = np.load(self.test_data[i].texture_path.replace(
                            'zeroz2', 'full')).astype(np.float32)
                        init_image = np.load(
                            self.test_data[i].cropped_image_path).astype(
                                np.float32) / 255.0
                        show([p, tex, init_image], mode='uvmap')
                        init_image = np.load(
                            self.test_data[i].cropped_image_path).astype(
                                np.float32) / 255.0
                        show([gt_y, tex, init_image], mode='uvmap')
                        demobg = np.load(
                            self.test_data[i].cropped_image_path).astype(
                                np.float32)
                        init_image = demobg / 255.0
                        img1, img2 = demoAll(p, demobg, is_render=False)
                mean_errors = np.mean(total_error_list, axis=0)
                for er in mean_errors:
                    print('%.5f' % er, end=' ')
                print('')
            for i in range(len(error_func_list)):
                print(error_func_list[i], mean_errors[i])

            se_idx = np.argsort(np.sum(total_error_list, axis=-1))
            se_data_list = np.array(self.test_data)[se_idx]
            se_path_list = [a.cropped_image_path for a in se_data_list]
            sep = '\n'
            with open('errororder.txt', 'w', encoding='utf-8') as fout:
                fout.write(sep.join(se_path_list))
Code example #19
File: handmapping.py  Project: pauloabelha/handy
hand_gt_filepath = dataset_root + hand_gt_folder + subject + '/' + action + '/' + seq + 'skeleton.txt'

hand_joints = fpa_io.read_action_joints_sequence(hand_gt_filepath)[31]
hand_joints = hand_joints.reshape((21, 3))
hand_joints -= hand_joints[0, :]
hand_joints_unreal = np.copy(hand_joints)

i = 0
for finger_idx in range(5):
    finger_start_joint_idx = (finger_idx * 4) + 1
    for j in range(3):
        parent_joint_idx = finger_start_joint_idx + j
        parent_joint_before = np.copy(hand_joints_unreal[parent_joint_idx, :])
        curr_bone_prop = bone_prop[parent_joint_idx - 1]
        hand_joints_unreal[parent_joint_idx, :] *= curr_bone_prop
        parent_joint_transl = hand_joints_unreal[
            parent_joint_idx, :] - parent_joint_before
        print(
            str(parent_joint_transl) + " " + str(curr_bone_prop) + " " +
            str(1 / curr_bone_prop))
        for k in range(3 - j):
            joint2_idx = parent_joint_idx + k + 1
            hand_joints_unreal[joint2_idx, :] += parent_joint_transl
            print(str(parent_joint_idx) + " " + str(joint2_idx))
            a = 0

fig, ax = vis.plot_3D_joints(hand_joints)
vis.plot_3D_joints(hand_joints_unreal, fig=fig, ax=ax)
vis.show()
Code example #20
    most_bottom = None
    for p in group:
        # print(p)
        if most_left is None or p[0] < most_left:
            most_left = p[0]
        if most_top is None or p[1] < most_top:
            most_top = p[1]
        if most_right is None or p[0] > most_right:
            most_right = p[0]
        if most_bottom is None or p[1] > most_bottom:
            most_bottom = p[1]
        # tl_dist = (tl[0] - p[0]) ** 2 + (tl[1] - p[1]) ** 2
        # br_dist = (br[0] - p[0]) ** 2 + (br[1] - p[1]) ** 2
        #
        # if best_tl_dist is None or best_tl_dist > tl_dist:
        #     best_tl_dist = tl_dist
        #     best_tl_ind = p
        #
        # if best_br_dist is None or best_br_dist > br_dist:
        #     best_br_dist = br_dist
        #     best_br_ind = p

    # bounding_box = [best_tl_ind[1], best_tl_ind[0], best_br_ind[1], best_br_ind[0]]
    # bounding_boxes.append(bounding_box)

    bounding_box = [most_top, most_left, most_bottom, most_right]
    bounding_boxes.append(bounding_box)

viz.visualize_image_with_bounding_boxes(image, bounding_boxes)
viz.show()
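
The per-point min/max scan above can be written more compactly with NumPy; this is a sketch, not part of the original script, and it assumes group is a sequence of (x, y) points:

import numpy as np

pts = np.asarray(group, dtype=float)
most_left, most_top = pts.min(axis=0)        # column-wise minima: x, then y
most_right, most_bottom = pts.max(axis=0)    # column-wise maxima: x, then y
bounding_box = [most_top, most_left, most_bottom, most_right]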