Exemplo n.º 1
0
def load_data():
    """Load and vectorize player stats for the 2014-2018 seasons.

    Builds one (players csv, all-stars txt, year) triple per season, feeds
    them to get_data, and one-hot encodes the feature dicts.

    Returns:
        vec: 2-D numpy array of vectorized features.
        labels: numpy array of all-star labels.
        names: player names, as returned by get_data.
    """
    vectorizer = DictVectorizer()
    # One [players csv, all-stars txt, year] triple per season 2014..2018.
    file_pairs = [
        ['data/players_' + str(year) + '.csv',
         'data/all_stars_' + str(year) + '.txt',
         year]
        for year in range(2014, 2019)
    ]
    var, labels, names = get_data(file_pairs)
    # fit_transform(...).toarray() already returns an ndarray, so the old
    # follow-up np.array(vec) call was redundant and has been dropped.
    vec = vectorizer.fit_transform(var).toarray()
    labels = np.array(labels)
    return vec, labels, names
Exemplo n.º 2
0
def init_players():
    """Load K-rate data and print a strikeout projection for one player.

    Runs the full stats pipeline (averages, weighted average/variance,
    uncertainty, projection) for the hard-coded player id "andeg001" and
    prints the results against the league average.
    """
    players = import_data.get_data("KRates.csv")
    lg_avg = set_lg_avg_stats(players)
    # Renamed from `id`, which shadowed the builtin of the same name.
    player_id = "andeg001"
    # Hoist the repeated lookup of this player's strikeout stat object.
    so_stats = players[player_id].stats["SO"]
    set_avg(so_stats)
    set_w_avg(so_stats)
    set_w_var(so_stats)
    set_uncertainty(so_stats)
    calc_projection(so_stats, lg_avg)
    print_results(players[player_id], lg_avg)
Exemplo n.º 3
0
def init_players():
    """Load K-rate data and print a strikeout projection for player andeg001.

    Computes average, weighted average, weighted variance, uncertainty, and
    the final projection for the player's "SO" stat, then prints the results.
    """
    players = import_data.get_data("KRates.csv")
    lg_avg = set_lg_avg_stats(players)
    # Use `player_id` so the builtin `id` is not shadowed.
    player_id = "andeg001"
    # Look the stat object up once instead of five times.
    strikeouts = players[player_id].stats["SO"]
    set_avg(strikeouts)
    set_w_avg(strikeouts)
    set_w_var(strikeouts)
    set_uncertainty(strikeouts)
    calc_projection(strikeouts, lg_avg)
    print_results(players[player_id], lg_avg)
Exemplo n.º 4
0
def main(argv):
    """Cluster the dataset, retrying with re-seeded centroids until success.

    Loads the dataset twice (one pristine copy for final reporting, one
    working copy), randomizes centroids, and loops: on successful
    verification prints success rates; after too many attempts gives up.

    NOTE(review): `number_of_centroids`, `iteration`, and `start_time` are
    read from module scope (not visible in this chunk) — confirm they are
    defined before main() runs.
    """
    initial_dataset = import_data.get_data()
    processing_dataset = import_data.get_data()

    processing_dataset = centroids.randomize_centroids(processing_dataset, number_of_centroids)

    verification_results = do_clustering(processing_dataset)

    while True:
        if is_clustering_successful(verification_results):
            output_success_rates(processing_dataset, initial_dataset)
            break
        if is_too_many_attempts():
            print("TOO MANY ATTEMPTS")
            break
        # Re-seed centroids from the failed verification and cluster again.
        # (Removed an unused `centroids_ids` list that was built here and
        # never read.)
        processing_dataset = centroids.setup_centroids_new(processing_dataset, verification_results)
        verification_results = do_clustering(processing_dataset)
    print("FINAL, ITERATIONS COUNT: ", iteration)
    print("TIME DURATION %s seconds" % (time.time() - start_time))
Exemplo n.º 5
0
'''

import matplotlib.pyplot as plt
import import_data as imp_data
import plot_trap_site as plt_trap
import config
import sys
import numpy as np
import temperature_single_atom as sim
'''
Here get Boolean data.
Sequence of zeros and ones
corresponding to trap site occupied or not. That simple.
'''
# Each returned sequence holds 0/1 occupation flags per trap site per run
# (presumably parallel sequences — TODO confirm against imp_data.get_data).
x_values, initial_bool, assembled_bool, assembled_list_bool, recapture_bool, atom_loss_bool = imp_data.get_data(
)

# Plot Histogram with number of atoms in each run
plt_trap.atom_number(initial_bool)
'''
Plot occupation probability for each trap site.
'''
plt_trap.init_plot(initial_bool)

# Toggle for plotting the (assembled - initial) difference image.
calc_diff = False
if calc_diff == True:  # NOTE(review): `== True` is redundant; `if calc_diff:` is idiomatic.
    # If True will plot image on 'assembled bool - initial'
    plt_trap.diff_plot(initial_bool, assembled_bool)

# Filter data for rearrangement ??
# NOTE(review): `filter` shadows the builtin of the same name; consider renaming.
filter = False
# This file creates and compuets the norms for all the layers present in a model

# Import the uwimg library
from uwimg import * 

from model_definition import softmax_model
from import_data import get_data
# FIRST LOAD THE DATA
# Call get_data() once and index the result, instead of loading the whole
# dataset twice (the previous revision called get_data() for each split).
_splits = get_data()
train_data = _splits[0]
test_data = _splits[1]


# Now iterate through the model layers
def iterate_over(model, iters, batchsize):
	"""Run `iters` passes computing G1/G2 and a running psi-norm per layer.

	NOTE(review): the `model` and `batchsize` parameters are never used; the
	body reads the module-level `Model` and `data` globals instead — confirm
	whether `model` was meant to be used here.
	NOTE(review): `train` (in the calculate_G2 call) is not defined anywhere
	in the visible file; possibly `data` or `train_data` was intended — verify.
	"""
	# Create a model
	for iter in range(iters):
		# uwimg C bindings: compute G1 for the current batch, then print it.
		calculate_G1(Model, data.X, data)
		print_matrix(Model.G1)
		psi = create_psi(data.X, data.y, 0.1)
		calculate_G2(Model, train.X, data, psi)
		# Now calculate the running average of Psi for all the layers in the model
		norm1 = 0
		for layers in Model.n:
			# NOTE(review): `(Model.layers+iter)` looks like ctypes-style
			# pointer arithmetic over the layer array, indexed by `iter`
			# rather than by the loop variable `layers` — confirm intent.
			psi = running_average(psi, (Model.layers+iter).G1, (Model.layers+iter).G2)
			norm1 = calculate_delta_norm(norm1 ,psi)

	
if __name__ == "__main__":
	batchsize = 128
	data = random_batch(test_data ,batchsize)
	Model = softmax_model(data.X.cols, data.y.cols)
                nuc_list.append(nuc)
                cyto_list.append(cyto)
                nuc_cyto_list.append(cv2.bitwise_or(nuc, cyto))

    return nuc_list, cyto_list, nuc_cyto_list, names, discard


def apply_mask(img_gray_list, mask_list):
    """Apply each mask to its corresponding grayscale image.

    Performs a per-pixel bitwise AND of img_gray_list[i] with mask_list[i]
    and returns the masked images as a new list.
    """
    return [
        cv2.bitwise_and(image, mask_list[i])
        for i, image in enumerate(img_gray_list)
    ]


# Load expert segmentations and raw cell images (path constants are defined
# elsewhere in this file/project — not visible in this chunk).
segmented_list = import_data.get_data(PATH_TO_EXPERT_HERLEV_SEGMENTATION)
cell_image_list = import_data.get_data(PATH_TO_HERLEV_IMGS)
# Each list appears to hold (image, name_and_label) pairs; unzip into
# parallel tuples — TODO confirm against import_data.get_data.
images_list, name_and_label_imgs_og = zip(*cell_image_list)
segmented_masks, name_and_label_exp_segment = zip(*segmented_list)

# Preview the sample at EVAL_INDEX; the window title is its name/label string.
cv2.imshow(name_and_label_exp_segment[EVAL_INDEX], segmented_masks[EVAL_INDEX])
cv2.imshow(name_and_label_imgs_og[EVAL_INDEX], images_list[EVAL_INDEX])
# cv2.waitKey()
# Split the segmentation masks into per-channel lists (helper defined elsewhere).
b_list_seg, g_list_seg, r_list_seg = separate_into_channels(segmented_masks)

# Side-by-side channel comparison of the evaluated mask.
compare = np.hstack(
    [b_list_seg[EVAL_INDEX], g_list_seg[EVAL_INDEX], r_list_seg[EVAL_INDEX]])
cv2.imshow('check', compare)

gray_list = rgb_to_gray(images_list)