# Aging-evolution NAS configuration: CNN search space on CIFAR-10 image classification.
import tensorflow_addons as tfa
from tensorflow.keras.callbacks import LearningRateScheduler

from cnn import CnnSearchSpace
from config import AgingEvoConfig, TrainingConfig, BoundConfig
from dataset import CIFAR10
from search_algorithms import AgingEvoSearch


def lr_schedule(epoch):
    # Step schedule: 0.01 for the first 90 epochs, 0.005 until epoch 105, then 0.001.
    if 0 <= epoch < 90:
        return 0.01
    if 90 <= epoch < 105:
        return 0.005
    return 0.001


search_algorithm = AgingEvoSearch

training_config = TrainingConfig(
    dataset=CIFAR10(),
    optimizer=lambda: tfa.optimizers.SGDW(
        learning_rate=0.01, momentum=0.9, weight_decay=1e-5),
    batch_size=128,
    epochs=130,
    callbacks=lambda: [LearningRateScheduler(lr_schedule)],
)

search_config = AgingEvoConfig(
    search_space=CnnSearchSpace(dropout=0.15),
    rounds=6000,
    checkpoint_dir="artifacts/cnn_cifar10"
)

# Constraints for the multi-objective search: test error, peak memory usage,
# model size, and multiply-accumulate (MAC) count.
bound_config = BoundConfig(
    error_bound=0.18,
    peak_mem_bound=75000,
    model_size_bound=75000,
    mac_bound=30000000
)
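# Illustration only (not part of the original configuration file): a minimal,
# self-contained sketch of how Keras applies the step schedule defined above.
# LearningRateScheduler calls lr_schedule(epoch) at the start of each epoch and
# writes the returned value into the optimizer. The tiny model and random data
# below are hypothetical stand-ins, assuming a TF 2.x / tf.keras environment.
if __name__ == "__main__":
    import numpy as np
    import tensorflow as tf

    demo_model = tf.keras.Sequential([tf.keras.Input(shape=(4,)),
                                      tf.keras.layers.Dense(1)])
    demo_model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=0.01),
                       loss="mse")
    x, y = np.random.rand(32, 4), np.random.rand(32, 1)
    # Run epochs 88..91 so the schedule crosses its first breakpoint at epoch 90.
    demo_model.fit(x, y, initial_epoch=88, epochs=92, verbose=0,
                   callbacks=[LearningRateScheduler(lr_schedule)])
    # The optimizer now holds lr_schedule(91) == 0.005 (printed as a float32 value).
    print(demo_model.optimizer.learning_rate.numpy())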
# Aging-evolution NAS configuration: CNN search space on the Chars74K character dataset.
import tensorflow_addons as tfa
from tensorflow.keras.callbacks import LearningRateScheduler

from cnn import CnnSearchSpace
from config import AgingEvoConfig, TrainingConfig, BoundConfig
from dataset import Chars74K
from search_algorithms import AgingEvoSearch

search_algorithm = AgingEvoSearch


def lr_schedule(epoch):
    # Step schedule: 0.01 for the first 35 epochs, then 0.005.
    if 0 <= epoch < 35:
        return 0.01
    return 0.005


training_config = TrainingConfig(
    dataset=Chars74K("/datasets/chars74k", img_size=(48, 48)),
    epochs=60,
    batch_size=80,
    optimizer=lambda: tfa.optimizers.SGDW(
        learning_rate=0.01, momentum=0.9, weight_decay=0.0001),
    callbacks=lambda: [LearningRateScheduler(lr_schedule)]
)

search_config = AgingEvoConfig(
    search_space=CnnSearchSpace(dropout=0.15),
    checkpoint_dir="artifacts/cnn_chars74k"
)

# Constraints for the multi-objective search: test error, peak memory usage,
# model size, and multiply-accumulate (MAC) count.
bound_config = BoundConfig(
    error_bound=0.3,
    peak_mem_bound=10000,
    model_size_bound=20000,
    mac_bound=1000000
)
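# Illustration only (not part of the original configuration file): the optimizer
# and callbacks fields above are zero-argument factories (lambdas) rather than
# instances, so every candidate architecture trained during the search can get a
# fresh optimizer state and fresh callback objects. A small sketch, assuming
# TrainingConfig exposes these callables under the same names as its constructor
# arguments:
if __name__ == "__main__":
    opt_a = training_config.optimizer()
    opt_b = training_config.optimizer()
    assert opt_a is not opt_b  # two independent SGDW instances, no shared slot state
    fresh_callbacks = training_config.callbacks()  # new LearningRateScheduler per run
    print(type(opt_a).__name__, [type(cb).__name__ for cb in fresh_callbacks])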
# Aging-evolution NAS configuration: CNN search space on the Speech Commands (v0.02)
# keyword-spotting dataset.
from tensorflow.keras.callbacks import LearningRateScheduler
from tensorflow_addons.optimizers import AdamW

from cnn import CnnSearchSpace
from config import AgingEvoConfig, TrainingConfig, BoundConfig
from dataset import SpeechCommands
from search_algorithms import AgingEvoSearch

search_algorithm = AgingEvoSearch


def lr_schedule(epoch):
    # Step schedule: 5e-4 for the first 20 epochs, 1e-4 until epoch 40, then 2e-5.
    if 0 <= epoch < 20:
        return 0.0005
    if 20 <= epoch < 40:
        return 0.0001
    return 0.00002


training_config = TrainingConfig(
    dataset=SpeechCommands("/datasets/speech_commands_v0.02"),
    epochs=45,
    batch_size=50,
    optimizer=lambda: AdamW(learning_rate=0.0005, weight_decay=1e-5),
    callbacks=lambda: [LearningRateScheduler(lr_schedule)]
)

search_config = AgingEvoConfig(
    search_space=CnnSearchSpace(),
    rounds=2000,
    checkpoint_dir="artifacts/cnn_speech_commands"
)

# Constraints for the multi-objective search: test error, peak memory usage,
# model size, and multiply-accumulate (MAC) count.
bound_config = BoundConfig(
    error_bound=0.085,
    peak_mem_bound=60000,
    model_size_bound=40000,
    mac_bound=20000000,
)
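# Illustration only (not part of the original configuration file): a rough sketch
# of how the four bounds above could be compared against measured resource usage
# of one candidate model. The metric values are made up, and the attribute names
# are assumed to mirror the BoundConfig constructor arguments; the search code's
# actual feasibility/objective logic may differ.
if __name__ == "__main__":
    candidate = {             # hypothetical measurements for a single candidate
        "error": 0.07,        # validation error rate
        "peak_mem": 55000,    # peak memory usage during inference
        "model_size": 38000,  # size of the stored model
        "macs": 18_500_000,   # multiply-accumulate operations per inference
    }
    feasible = (candidate["error"] <= bound_config.error_bound
                and candidate["peak_mem"] <= bound_config.peak_mem_bound
                and candidate["model_size"] <= bound_config.model_size_bound
                and candidate["macs"] <= bound_config.mac_bound)
    print("candidate satisfies all bounds:", feasible)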
# Aging-evolution NAS configuration: CNN search space on MNIST digit classification.
import tensorflow_addons as tfa

from cnn import CnnSearchSpace
from config import AgingEvoConfig, TrainingConfig, BoundConfig
from dataset import MNIST
from search_algorithms import AgingEvoSearch

search_algorithm = AgingEvoSearch

training_config = TrainingConfig(
    dataset=MNIST(),
    epochs=30,
    batch_size=128,
    optimizer=lambda: tfa.optimizers.SGDW(
        learning_rate=0.005, momentum=0.9, weight_decay=4e-5),
    callbacks=lambda: [],  # constant learning rate; no per-epoch schedule here
)

search_config = AgingEvoConfig(
    search_space=CnnSearchSpace(),
    checkpoint_dir="artifacts/cnn_mnist"
)

# Constraints for the multi-objective search: test error, peak memory usage,
# model size, and multiply-accumulate (MAC) count.
bound_config = BoundConfig(
    error_bound=0.035,
    peak_mem_bound=2500,
    model_size_bound=4500,
    mac_bound=30000000
)
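# Illustration only (not part of the original configuration file): a hedged sketch
# of how a config module like this one might be consumed to launch a search. The
# constructor arguments and the search() call below are assumptions about the
# AgingEvoSearch API, not its documented interface; the project's own driver
# script remains the authoritative entry point.
if __name__ == "__main__":
    search = search_algorithm(
        experiment_name="cnn_mnist",  # hypothetical experiment label
        search_config=search_config,
        training_config=training_config,
        bound_config=bound_config,
    )
    search.search()  # assumed method: run aging evolution, checkpointing to checkpoint_dir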