Example #1
    def __init__(self, experiment_name: str, search_config: AgingEvoConfig,
                 training_config: TrainingConfig, bound_config: BoundConfig):
        self.log = logging.getLogger(
            name=f"AgingEvoSearch [{experiment_name}]")
        self.config = search_config
        self.trainer = ModelTrainer(training_config)

        self.root_dir = Path(search_config.checkpoint_dir)
        self.root_dir.mkdir(parents=True, exist_ok=True)
        self.experiment_name = experiment_name

        if training_config.pruning and not training_config.pruning.structured:
            self.log.warning(
                "For unstructured pruning, we can only meaningfully use the model "
                "size resource metric.")
            bound_config.peak_mem_bound = None
            bound_config.mac_bound = None
        self.pruning = training_config.pruning

        # We fix the order of objectives in the feature vector; all functions must preserve this order.
        self.constraint_bounds = [
            bound_config.error_bound, bound_config.peak_mem_bound,
            bound_config.model_size_bound, bound_config.mac_bound
        ]

        self.history: List[EvaluatedPoint] = []
        self.population: List[EvaluatedPoint] = []

        self.population_size = search_config.population_size
        self.initial_population_size = search_config.initial_population_size or self.population_size
        self.rounds = search_config.rounds
        self.sample_size = search_config.sample_size
        num_gpus = len(tf.config.experimental.list_physical_devices("GPU"))
        self.max_parallel_evaluations = search_config.max_parallel_evaluations or num_gpus
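For orientation, a minimal usage sketch; the config objects would come from a module like those in Examples #3 through #8, and the experiment name is arbitrary:

# Usage sketch (illustrative only): wire the four config objects into the
# search. `search_config`, `training_config`, and `bound_config` are assumed
# to be defined as in the config modules shown below.
search = AgingEvoSearch(experiment_name="cnn_cifar10",
                        search_config=search_config,
                        training_config=training_config,
                        bound_config=bound_config)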
Example #2
    def __init__(self,
                 experiment_name: str,
                 search_config: BayesOptConfig,
                 training_config: TrainingConfig,
                 bound_config: BoundConfig):
        assert search_config.starting_points >= 1

        self.log = logging.getLogger(name=f"BayesOpt [{experiment_name}]")
        self.config = search_config
        self.trainer = ModelTrainer(training_config)

        self.root_dir = Path(search_config.checkpoint_dir)
        self.root_dir.mkdir(parents=True, exist_ok=True)
        self.experiment_name = experiment_name

        if training_config.pruning and not training_config.pruning.structured:
            self.log.warning("For unstructured pruning, we can only use the model size resource metric.")
            bound_config.peak_mem_bound = None
            bound_config.mac_bound = None

        # We fix the order of objectives in the feature vector; all functions must preserve this order.
        self.constraint_bounds = [bound_config.error_bound, bound_config.peak_mem_bound,
                                  bound_config.model_size_bound, bound_config.mac_bound]
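Since both constructors rely on this fixed order (error, peak memory, model size, MACs), a feasibility check against constraint_bounds might look like the sketch below; within_bounds is a hypothetical helper, not part of the source.

# Hypothetical helper: objectives must follow the same fixed order as
# constraint_bounds, i.e. (error, peak_mem, model_size, macs).
def within_bounds(objectives, bounds):
    # A bound of None means that resource is unconstrained.
    return all(b is None or o <= b for o, b in zip(objectives, bounds))

# Example: a model with 8% error, 52 KB peak memory, 38 KB size and 18M MACs
# checked against the bounds from Example #4.
feasible = within_bounds((0.08, 52000, 38000, 18000000),
                         (0.085, 60000, 40000, 20000000))  # -> True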
Example #3
# NOTE: the imports below, other than AgingEvoSearch, are inferred from the
# module layout shown in Example #8.
import tensorflow_addons as tfa

from cnn import CnnSearchSpace
from config import AgingEvoConfig, TrainingConfig, BoundConfig
from dataset import CIFAR10
from search_algorithms import AgingEvoSearch
from tensorflow.keras.callbacks import LearningRateScheduler


def lr_schedule(epoch):
    if 0 <= epoch < 90:
        return 0.01
    if 90 <= epoch < 105:
        return 0.005
    return 0.001


search_algorithm = AgingEvoSearch

training_config = TrainingConfig(
    dataset=CIFAR10(),
    optimizer=lambda: tfa.optimizers.SGDW(
        learning_rate=0.01, momentum=0.9, weight_decay=1e-5),
    batch_size=128,
    epochs=130,
    callbacks=lambda: [LearningRateScheduler(lr_schedule)],
)

search_config = AgingEvoConfig(search_space=CnnSearchSpace(dropout=0.15),
                               rounds=6000,
                               checkpoint_dir="artifacts/cnn_cifar10")

bound_config = BoundConfig(error_bound=0.18,
                           peak_mem_bound=75000,
                           model_size_bound=75000,
                           mac_bound=30000000)
Example #4

# NOTE: imports inferred from the module layout shown in Example #8;
# SpeechCommands is assumed to live in the dataset module.
from cnn import CnnSearchSpace
from config import AgingEvoConfig, TrainingConfig, BoundConfig
from dataset import SpeechCommands
from tensorflow.keras.callbacks import LearningRateScheduler
from tensorflow_addons.optimizers import AdamW

def lr_schedule(epoch):
    if 0 <= epoch < 20:
        return 0.0005
    if 20 <= epoch < 40:
        return 0.0001
    return 0.00002


training_config = TrainingConfig(
    dataset=SpeechCommands("/datasets/speech_commands_v0.02"),
    epochs=45,
    batch_size=50,
    optimizer=lambda: AdamW(learning_rate=0.0005, weight_decay=1e-5),
    callbacks=lambda: [
        LearningRateScheduler(lr_schedule)
    ]
)

search_config = AgingEvoConfig(
    search_space=CnnSearchSpace(),
    rounds=2000,
    checkpoint_dir="artifacts/cnn_speech_commands"
)

bound_config = BoundConfig(
    error_bound=0.085,
    peak_mem_bound=60000,
    model_size_bound=40000,
    mac_bound=20000000,
)
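Both lr_schedule functions above are plain step functions of the epoch index, invoked once per epoch by Keras's LearningRateScheduler callback. A quick sanity check of the Speech Commands schedule at its boundaries:

# Illustrative check: the rate steps down at epochs 20 and 40.
for epoch in (0, 19, 20, 39, 40):
    print(epoch, lr_schedule(epoch))
# -> 0.0005 up to epoch 19, 0.0001 up to epoch 39, 2e-05 afterwards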
Example #5
# NOTE: the imports below, other than AgingEvoSearch, are inferred from the
# module layout shown in Example #8.
import tensorflow_addons as tfa

from cnn import CnnSearchSpace
from config import AgingEvoConfig, TrainingConfig, BoundConfig
from dataset import Chars74K
from search_algorithms import AgingEvoSearch
from tensorflow.keras.callbacks import LearningRateScheduler

search_algorithm = AgingEvoSearch


def lr_schedule(epoch):
    if 0 <= epoch < 35:
        return 0.01
    return 0.005


training_config = TrainingConfig(
    dataset=Chars74K("/datasets/chars74k", img_size=(48, 48)),
    epochs=60,
    batch_size=80,
    optimizer=lambda: tfa.optimizers.SGDW(learning_rate=0.01, momentum=0.9, weight_decay=0.0001),
    callbacks=lambda: [LearningRateScheduler(lr_schedule)]
)

search_config = AgingEvoConfig(
    search_space=CnnSearchSpace(dropout=0.15),
    checkpoint_dir="artifacts/cnn_chars74k"
)

bound_config = BoundConfig(
    error_bound=0.3,
    peak_mem_bound=10000,
    model_size_bound=20000,
    mac_bound=1000000
)
Example #6
from config import PruningConfig, BoundConfig
from configs.cnn_mnist_aging import training_config, search_config, search_algorithm

training_config.pruning = PruningConfig(structured=False,
                                        start_pruning_at_epoch=3,
                                        finish_pruning_by_epoch=18,
                                        min_sparsity=0.2,
                                        max_sparsity=0.98)

bound_config = BoundConfig(error_bound=0.025,
                           peak_mem_bound=None,
                           model_size_bound=1000,
                           mac_bound=None)
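Note that these explicit None bounds match the guard in Examples #1 and #2: with structured=False, peak_mem_bound and mac_bound would be reset to None anyway, so setting them here makes the intent explicit.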
Example #7
from config import PruningConfig, BoundConfig
from configs.cnn_cifar10_aging import training_config, search_config, search_algorithm

training_config.pruning = PruningConfig(structured=True,
                                        start_pruning_at_epoch=90,
                                        finish_pruning_by_epoch=120,
                                        min_sparsity=0.1,
                                        max_sparsity=0.90)

bound_config = BoundConfig(error_bound=0.10,
                           peak_mem_bound=50000,
                           model_size_bound=50000,
                           mac_bound=60000000)
Example #8
import tensorflow_addons as tfa

from cnn import CnnSearchSpace
from config import AgingEvoConfig, TrainingConfig, BoundConfig
from dataset import MNIST
from search_algorithms import AgingEvoSearch

search_algorithm = AgingEvoSearch

training_config = TrainingConfig(
    dataset=MNIST(),
    epochs=30,
    batch_size=128,
    optimizer=lambda: tfa.optimizers.SGDW(learning_rate=0.005, momentum=0.9, weight_decay=4e-5),
    callbacks=lambda: [],
)

search_config = AgingEvoConfig(
    search_space=CnnSearchSpace(),
    checkpoint_dir="artifacts/cnn_mnist"
)

bound_config = BoundConfig(
    error_bound=0.035,
    peak_mem_bound=2500,
    model_size_bound=4500,
    mac_bound=30000000
)
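A config module like this one is consumed by a driver that instantiates the chosen search algorithm. Below is a minimal sketch assuming the constructor signature from Examples #1 and #2; the module path and experiment name are illustrative, and whether cnn_mnist_aging exports bound_config is an assumption:

# Hypothetical driver: load a config module and construct the search.
# The module path is assumed (Example #6 imports from configs.cnn_mnist_aging,
# which this file appears to be); the run entry point is not shown here.
from configs.cnn_mnist_aging import (search_algorithm, search_config,
                                     training_config, bound_config)

search = search_algorithm(experiment_name="cnn_mnist",
                          search_config=search_config,
                          training_config=training_config,
                          bound_config=bound_config)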