Example 1
    def train_models(
            self,
            partitions_path,
            graphs_path,
            init_lr=1e-4,
            train_batch_size=32,
            valid_batch_size=32,
            epoch_batch=5,
            epochs=100,
            checkpoint_path="checkpoint/aux_30_epoch_",  # This version only looks at race + age (not gender)
    ):
        """Train one auxiliary model per dataset partition, profiling each run.

        For each of ``epochs / epoch_batch`` steps a fresh model is built,
        trained on partition ``i`` for a growing epoch budget, saved under
        ``checkpoint_path``, and its validation-loss curve written to
        ``graphs_path``.

        Args:
            partitions_path: pickle file holding the list of dataset partitions.
            graphs_path: directory for the loss plots; joined with ``/`` below,
                so presumably a ``pathlib.Path`` — a plain string would fail.
            init_lr: unused in this method; kept for signature compatibility.
            train_batch_size: batch size for the training generator.
            valid_batch_size: batch size for the validation generator.
            epoch_batch: epochs added per partition step.
            epochs: total epoch budget; ``epochs / epoch_batch`` steps are run.
            checkpoint_path: filename prefix for the saved models.
        """
        # NOTE(review): pickle.load on an untrusted file can execute arbitrary
        # code — only load partition files produced by this project.
        with open(partitions_path, "rb") as fp:
            set_partitions = pickle.load(fp)

        # Training Auxiliary Models!
        for i in range(int(epochs / epoch_batch)):
            # Epoch count for this step. The original hard-coded the literal 5
            # here (and below), which silently disagreed with any non-default
            # epoch_batch; using the parameter keeps the two consistent.
            step_epochs = (i + 1) * epoch_batch

            data_generator_aux = UtkFaceDataGeneratorAuxOneModel(
                self.dataset_folder_name,
                set_partitions[i],
                self.dataset_dict,
                num_classes=self.num_classes)
            aux_train_idx, aux_valid_idx, _ = data_generator_aux.generate_split_indexes(
            )

            train_images_collection, train_status_collection = data_generator_aux.pre_generate_images(
                aux_train_idx, batch_size=train_batch_size)

            valid_images_collection, valid_status_collection = data_generator_aux.pre_generate_images(
                aux_valid_idx, batch_size=valid_batch_size)

            aux_train_gen = data_generator_aux.generate_images(
                is_training=True,
                images_collection=train_images_collection,
                status_collection=train_status_collection)
            # NOTE(review): is_training=True for the *validation* generator is
            # kept from the original — confirm augmentation on the validation
            # split is intentional.
            aux_valid_gen = data_generator_aux.generate_images(
                is_training=True,
                images_collection=valid_images_collection,
                status_collection=valid_status_collection)

            # Fresh model per partition; early-stop on stalled validation loss.
            aux_model = self.build_model()
            es = EarlyStopping(monitor='val_loss', mode='min', patience=10)

            profiler = Profiler()
            profiler.start()
            history = aux_model.fit(
                aux_train_gen,
                steps_per_epoch=len(aux_train_idx) // train_batch_size,
                epochs=step_epochs,
                validation_data=aux_valid_gen,
                validation_steps=len(aux_valid_idx) // valid_batch_size,
                callbacks=[es])
            profiler.stop()
            profiler.print(show_all=True)

            aux_model.save(str(checkpoint_path) + "_" + str(step_epochs))

            # Plot this step's validation-loss curve, then clear the figure so
            # later steps do not overlay every previous curve in their image
            # (the original accumulated curves across iterations).
            val_loss = history.history['val_loss']
            plt.plot(range(len(val_loss)), val_loss)
            plt.title("Auxiliary Model Validation Loss - {} Epochs".format(
                step_epochs))
            plt.savefig(graphs_path / "aux_30_epoch_val_loss_{}".format(
                step_epochs))
            plt.clf()
Example 2
    def train_model(self,
                    init_lr=1e-4,
                    train_batch_size=16,
                    valid_batch_size=16,
                    epoch_batch=5,
                    epochs=100,
                    checkpoint_dir="checkpoint/base_epochs_"):
        """Compile and incrementally train the multi-output age/race/gender model.

        The model is compiled once, then trained in ``epochs / epoch_batch``
        steps; after the first step each iteration reloads the model saved by
        the previous step from ``checkpoint_dir``.

        Args:
            init_lr: initial Adam learning rate, decayed linearly over ``epochs``.
            train_batch_size: batch size for the training generator.
            valid_batch_size: batch size for the validation generator.
            epoch_batch: epochs represented by each checkpointed step.
            epochs: total epoch budget.
            checkpoint_dir: directory of step checkpoints; it is joined with
                ``/`` below, so presumably a ``pathlib.Path`` — the string
                default would fail here. TODO confirm what callers pass.
        """
        opt = Adam(lr=init_lr, decay=init_lr / epochs)
        # One loss per output head; the weights bias optimization toward the
        # age regression head and nearly ignore the gender head.
        self.model.compile(optimizer=opt,
                           loss={
                               'age_output': 'mse',
                               'race_output': 'categorical_crossentropy',
                               'gender_output': 'binary_crossentropy'
                           },
                           loss_weights={
                               'age_output': 4.,
                               'race_output': 1.5,
                               'gender_output': 0.1
                           },
                           metrics={
                               'age_output': 'mae',
                               'race_output': 'accuracy',
                               'gender_output': 'accuracy'
                           })

        for i in range(int(epochs / epoch_batch)):
            # Destination for this step's checkpoint. It is unused in the code
            # visible here — presumably a save using it follows this excerpt;
            # TODO confirm against the full file.
            current_checkpoint = checkpoint_dir / "base_epochs_{}".format(
                str((i + 1) * epoch_batch))
            if i != 0:
                # Resume from the model saved by the previous step.
                self.model = load_model(
                    checkpoint_dir /
                    "base_epochs_{}".format(str((i) * epoch_batch)))
            print(len(self.train_idx), len(self.valid_idx))
            # Pre-materialize image/label batches, then wrap them in the
            # (infinite) training generators consumed by fit_generator.
            train_images_collection, train_status_collection = self.data_generator.pre_generate_images(
                self.train_idx, batch_size=train_batch_size)

            valid_images_collection, valid_status_collection = self.data_generator.pre_generate_images(
                self.valid_idx, batch_size=valid_batch_size)
            train_gen = self.data_generator.generate_images(
                is_training=True,
                images_collection=train_images_collection,
                status_collection=train_status_collection)
            # NOTE(review): is_training=True for the validation generator —
            # confirm augmentation on the validation split is intentional.
            valid_gen = self.data_generator.generate_images(
                is_training=True,
                images_collection=valid_images_collection,
                status_collection=valid_status_collection)
            profiler = Profiler()
            profiler.start()
            history = self.model.fit_generator(
                train_gen,
                steps_per_epoch=len(self.train_idx) // train_batch_size,
                # epochs=epoch_batch,
                # NOTE(review): epochs pinned to 1 (epoch_batch left commented
                # out above) — each "step" trains a single epoch as written.
                epochs=1,
                validation_data=valid_gen,
                validation_steps=len(self.valid_idx) // valid_batch_size)
            profiler.stop()
            profiler.print(show_all=True)
Example 3
def test_performance():
    """Profile 10k decodes of a Tether ``transfer`` call and print the report."""
    from pyinstrument import Profiler

    # Calldata for transfer(address,uint256): 4-byte selector + two words.
    calldata = (
        "0xa9059cbb000000000000000000000000f050227be1a7ce587aa83d5013f900dbc3be"
        "0611000000000000000000000000000000000000000000000000000000000ecdd350"
    )

    profiler = Profiler()
    with profiler:
        decoder = InputDecoder(TETHER_ABI)
        for _ in range(10_000):
            decoded = decoder.decode_function(calldata)
            assert decoded.name == "transfer"
    profiler.print()
Example 4
import numpy as np

from pyinstrument import Profiler
from vector import Vector2D


# Profile 100k Vector2D additions built from random integer components.
profiler = Profiler()
profiler.start()

for _ in range(100_000):
    lhs = Vector2D(np.random.randint(-10, 10), np.random.randint(-10, 10))
    rhs = Vector2D(np.random.randint(-10, 10), np.random.randint(-10, 10))
    total = lhs + rhs  # noqa: result discarded — only the __add__ call is timed

profiler.stop()
profiler.print()