Example #1
def main(argv):
    """
	loads data, initializes network and starts api
	"""
    global Net
    img_codes, captions = load_data()
    captions = splitter(captions)
    print(captions.shape)
    captions = captions.flatten()
    #vocab,n_tokens,word_to_index=make_vocabulary(captions)
    vocab = pickle.load(open("outfile1", "rb"))
    #print(vocab[1])
    n_tokens = len(vocab)
    word_to_index = pickle.load(open("outfile2", "rb"))
    #print(word_to_index.keys()[0:5])
    PAD_ix = -1
    UNK_ix = vocab.index('#UNK#')
    Net = Network(img_codes, n_tokens, captions, vocab, word_to_index)
    Net.Network_init()
    # Despite the method name, action='load' restores the weights saved in model_big.npz
    Net.save_weights('model_big.npz', action='load')
    #Net.Network_train(5,100,10)
    #Net.make_caption('sample_images/stas.jpg')
    button1 = Button(root, font="helvetica 15", text="Open file")
    button1.grid(row=0, column=0)
    button1.bind("<Button-1>", Open)

    root.geometry("1000x480")
    root.mainloop()
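The `Open` handler bound to the button is not shown in this example. Below is a minimal sketch of what it might look like, assuming `Net.make_caption` (seen commented out above) returns a caption string; the `filedialog` call and the result `Label` are assumptions, not the original code:

from tkinter import Label, filedialog

def Open(event):
    # Ask for an image, caption it with the loaded network, show the result.
    path = filedialog.askopenfilename(
        filetypes=[("Images", "*.jpg *.png"), ("All files", "*.*")])
    if not path:
        return
    caption = Net.make_caption(path)  # assumed to return a caption string
    Label(root, font="helvetica 15", text=caption).grid(row=1, column=0)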
Example #2
# NOTE: this snippet starts mid-file; the imports and parser setup below are a
# reconstructed assumption, inferred from the `pa.*` attributes used further down.
import argparse
import json

import numpy as np

import network  # assumed local module providing load_data, load_checkpoint and predict

ap = argparse.ArgumentParser()
ap.add_argument('input_img', action="store", type=str)
ap.add_argument('checkpoint',
                action="store",
                type=str)
ap.add_argument('--top_k', default=5, dest="top_k", action="store", type=int)
ap.add_argument('--category_names',
                dest="category_names",
                action="store",
                default='cat_to_name.json')
ap.add_argument('--gpu', default="gpu", action="store", dest="gpu")

pa = ap.parse_args()
path_image = pa.input_img
number_of_outputs = pa.top_k
power = pa.gpu
input_img = pa.input_img
path = pa.checkpoint

training_loader, testing_loader, validation_loader, train_data = network.load_data()

model = network.load_checkpoint(path)

# NOTE: --category_names is parsed above, but this open() hardcodes the default path
with open('cat_to_name.json', 'r') as json_file:
    cat_to_name = json.load(json_file)

# probabilities = network.predict('./flowers/test/1/image_06743.jpg', model, number_of_outputs, power)
probabilities = network.predict(path_image, model, number_of_outputs, power)

labels = [
    cat_to_name[str(index + 1)] for index in np.array(probabilities[1][0])
]
probability = np.array(probabilities[0][0])

i = 0
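The example is truncated after `i = 0`. A plausible continuation (an assumption, not the original code) is a loop that prints the top-k class names with their probabilities:

while i < number_of_outputs:
    print("{} with a probability of {}".format(labels[i], probability[i]))
    i += 1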
Example #3
    def __init__(self, rand_int=0, num_of_samples=None, args=None):
        if args is None:
            args = get_default_parser(num_of_samples)
        self.cov_net = args.cov_net
        self.calc_information = args.calc_information
        self.run_in_parallel = args.run_in_parallel
        self.num_ephocs = args.num_ephocs
        self.learning_rate = args.learning_rate
        self.batch_size = args.batch_size
        self.activation_function = args.activation_function
        self.interval_accuracy_display = args.interval_accuracy_display
        self.save_grads = args.save_grads
        self.num_of_repeats = args.num_of_repeats
        self.calc_information_last = args.calc_information_last
        self.num_of_bins = args.num_of_bins
        self.interval_information_display = args.interval_information_display
        self.save_ws = args.save_ws

        self.name = args.data_dir + args.data_name
        # The architecture of the networks
        self.select_network_arch(args.net_type)
        # The percentages of the training-data samples to use
        self.train_samples = np.linspace(1, 100,
                                         199)[[[x * 2 - 2 for x in index]
                                               for index in args.inds]]
        # The epoch indices at which to calculate the information, spaced on a log scale
        self.epochs_indexes = np.unique(
            np.logspace(np.log2(args.start_samples),
                        np.log2(args.num_ephocs),
                        args.num_of_samples,
                        dtype=int,
                        base=2)) - 1
        # The maximal number of layers over all candidate architectures
        max_size = np.max(
            [len(layers_size) for layers_size in self.layers_sizes])
        # Load the data
        self.data_sets = nn.load_data(self.name, args.random_labels)
        # Create arrays for saving the data
        self.ws, self.grads, self.information, self.models, self.names, self.networks, self.weights = [
            [[[[None] for k in range(len(self.train_samples))]
              for j in range(len(self.layers_sizes))]
             for i in range(self.num_of_repeats)] for _ in range(7)
        ]

        self.loss_train, self.loss_test,  self.test_error, self.train_error, self.l1_norms, self.l2_norms= \
            [np.zeros((self.num_of_repeats, len(self.layers_sizes), len(self.train_samples), len(self.epochs_indexes))) for _ in range(6)]

        params = {
            'samples_len': len(self.train_samples),
            'num_of_disribuation_samples': args.num_of_disribuation_samples,
            'layersSizes': self.layers_sizes,
            'numEphocs': args.num_ephocs,
            'batch': args.batch_size,
            'numRepeats': args.num_of_repeats,
            'numEpochsInds': len(self.epochs_indexes),
            'LastEpochsInds': self.epochs_indexes[-1],
            'DataName': args.data_name,
            'learningRate': args.learning_rate
        }

        self.name_to_save = args.name + "_" + "_".join(
            [str(i) + '=' + str(params[i]) for i in params])

        params['train_samples'] = self.train_samples
        params['CPUs'] = NUM_CORES
        params['directory'] = self.name_to_save
        params['epochsInds'] = self.epochs_indexes
        self.params = params
        self.rand_int = rand_int

        # Whether the network has already been trained
        self.traind_network = False
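For context, here is a standalone sketch of the epoch-index computation above, with assumed values for start_samples, num_ephocs and num_of_samples (the real ones come from the parsed args); it shows how the log-spaced indices come out dense early in training and progressively sparser later:

import numpy as np

# Assumed example values, for illustration only.
start_samples, num_ephocs, num_of_samples = 1, 1000, 20
epochs_indexes = np.unique(
    np.logspace(np.log2(start_samples),
                np.log2(num_ephocs),
                num_of_samples,
                dtype=int,
                base=2)) - 1
print(epochs_indexes)  # many early epochs, fewer and fewer later ones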
Example #4
                default="vgg13",
                type=str)
ap.add_argument('--hidden_units',
                type=int,
                dest="hidden_units",
                action="store",
                default=120)

pa = ap.parse_args()
where = pa.data_dir
path = pa.save_dir
lr = pa.learning_rate
structure = pa.arch
dropout = pa.dropout
hidden_layer1 = pa.hidden_units
power = pa.gpu
epochs = pa.epochs

trainloader, v_loader, testloader, train_data = network.load_data(where)

model, optimizer, criterion = network.nn_network(structure, dropout,
                                                 hidden_layer1, lr, power)

network.train_network(model, optimizer, criterion, trainloader, v_loader,
                      epochs, 20, power)

network.save_checkpoint(model, train_data, path, structure, hidden_layer1,
                        dropout, lr)

print("All Set and Done. The Model is trained")
Example #5
    def test_load_data(self):
        X, Y = network.load_data("data/unittest/X.txt", "data/unittest/Y.txt")
        self.assertEqual((64, 4), X.shape)
        self.assertEqual((3, 8), Y.shape)
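A minimal sketch of a load_data compatible with the assertions above (an assumption, not the module's actual implementation): it reads whitespace-separated numeric matrices with numpy.

import numpy as np

def load_data(x_path, y_path):
    X = np.loadtxt(x_path)  # expected shape (64, 4) for the unittest fixture
    Y = np.loadtxt(y_path)  # expected shape (3, 8) for the unittest fixture
    return X, Y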