def __init__(self, argv, reactor, objective):
    """Construct the genetic-algorithm optimizer.

    Sets up the generic Optimizer state, configures the underlying PGA
    integer-string maximization problem (one gene per fuel bundle), wires
    the GA operator callbacks, and seeds the default run parameters.
    """
    # Base optimizer state.
    Optimizer.__init__(self, argv, reactor, objective)

    # Underlying PGA problem: integer strings of length number_bundles,
    # maximizing the objective.
    PGA.__init__(self, argv, PGA.DATATYPE_INTEGER,
                 self.reactor.number_bundles(), PGA.MAXIMIZE)

    # GA operator callbacks.
    self.SetCrossover(self.htbx)             # crossover operator
    self.SetEndOfGen(self.end_of_iteration)  # end-of-generation reporting
    self.SetInitString(self.init)            # string initialization
    self.SetMutation(self.swap)              # mutation via a single swap

    # Default run parameters.
    self.maximum_generations = 100  # maximum number of generations
    self.population_size = 50       # individuals per generation
    self.number_replaced = 40       # individuals replaced each generation
    self.seed = 123                 # PGA random number seed
    self.np_seed = 123              # NumPy random number seed
    self.binary_sweep = False       # perform one sweep of binary exchanges

    # Optimizer-specific flags.
    self.track_best = False
    self.fixed_central = True

    # Counter for evaluations performed on this process.
    self.evals = 0
def __init__(self, mod, value, seed, dist, directory, full):
    """Set up the optimizer with its model, distribution, and output directory."""
    Optimizer.__init__(self, value)
    # Dedicated RNG instance so runs are reproducible for a given seed.
    self.rand = random.Random(seed)
    self.model = mod
    self.dist = dist
    self.directory = directory
    self.full = full
def __init__(self, target_dynamic_path, target_static_path, config):
    """Build the dynamic-texture synthesis graph.

    Loads the target dynamic texture (a stack of frames) and the target
    static texture, initializes the output variable with Gaussian noise,
    and assembles the combined appearance + dynamics loss on the GPU
    selected in the user config.
    """
    # Base setup; 256 and 8 are presumably the input dimension and frame
    # count consumed by Optimizer — TODO confirm against the base class.
    Optimizer.__init__(self, tf.Graph(), 256, 8, target_dynamic_path,
                       target_static_path, config)
    with self.graph.as_default():
        with tf.device('/gpu:' + str(self.user_config['gpu'])):
            # Load the dynamic texture: one constant tensor per frame,
            # each reshaped to (1, H, W, 3).
            imgs = load_images(
                target_dynamic_path,
                size=(self.input_frame_count, self.input_dimension,
                      self.input_dimension))
            self.target_dynamic_texture = [
                tf.to_float(
                    tf.constant(
                        img.reshape(1, self.input_dimension,
                                    self.input_dimension, 3)))
                for img in imgs
            ]

            # Load the static texture (for dynamics style transfer),
            # also (1, H, W, 3).
            img = load_image(target_static_path,
                             size=(self.input_dimension,
                                   self.input_dimension))
            self.target_static_texture = tf.to_float(
                tf.constant(
                    img.reshape(1, self.input_dimension,
                                self.input_dimension, 3)))

            # TODO: check for b/w input

            # Initialize the synthesized output with Gaussian noise;
            # optimization iteratively refines this variable.
            initial_noise = tf.random_normal([
                self.user_config['batch_size'], self.input_frame_count,
                self.input_dimension, self.input_dimension, 3
            ])
            self.output = tf.Variable(initial_noise, name='output')

            # TODO: let weight be user-definable
            # Appearance descriptors (one per frame), weighted 1e9.
            self.appearance_loss = \
                self.build_appearance_descriptors(
                    'appearance_descriptors', 1e9)

            # TODO: let weight be user-definable
            # Dynamics descriptors (one per pair of frames), weighted 1e15.
            self.dynamics_loss = \
                self.build_dynamics_descriptors('dynamics_descriptors',
                                                1e15)

            # Combined dynamic-texture loss, averaged over the batch.
            self.dyntex_loss = tf.add(self.appearance_loss,
                                      self.dynamics_loss)
            self.dyntex_loss = tf.div(self.dyntex_loss,
                                      self.user_config['batch_size'])

            # Attach TensorBoard summaries.
            self.attach_summaries('summaries')
def __init__(self, model: Model, hyperparameters: dict, verbose: bool = True):
    """Build the optimizer from the passed hyperparameters.

    Reads the optimizer/regularizer configuration out of *hyperparameters*
    and instantiates only the selected torch optimizer and regularizer.

    :param model: the Model whose parameters will be optimized
    :param hyperparameters: dict holding OPTIMIZER_NAME, BATCH_SIZE, EPOCHS,
        LEARNING_RATE, DECAY_1, DECAY_2, REGULARIZER_NAME and
        REGULARIZER_WEIGHT entries
    :param verbose: verbosity flag, forwarded to the base Optimizer
    :raises KeyError: if a required hyperparameter is missing, or the
        optimizer/regularizer name is not among the supported ones
    """
    Optimizer.__init__(self,
                       model=model,
                       hyperparameters=hyperparameters,
                       verbose=verbose)

    self.optimizer_name = hyperparameters[OPTIMIZER_NAME]
    self.batch_size = hyperparameters[BATCH_SIZE]
    self.epochs = hyperparameters[EPOCHS]
    self.learning_rate = hyperparameters[LEARNING_RATE]
    self.decay1, self.decay2 = hyperparameters[DECAY_1], hyperparameters[
        DECAY_2]
    self.regularizer_name = hyperparameters[REGULARIZER_NAME]
    self.regularizer_weight = hyperparameters[REGULARIZER_WEIGHT]

    # Factories instead of instances: eagerly constructing every supported
    # torch optimizer (as the original dict-of-instances did) allocates
    # per-parameter optimizer state (e.g. Adagrad accumulators) for
    # optimizers that are never used. Only the selected one is built.
    supported_optimizers = {
        'Adagrad': lambda: optim.Adagrad(params=self.model.parameters(),
                                         lr=self.learning_rate),
        'Adam': lambda: optim.Adam(params=self.model.parameters(),
                                   lr=self.learning_rate,
                                   betas=(self.decay1, self.decay2)),
        'SGD': lambda: optim.SGD(params=self.model.parameters(),
                                 lr=self.learning_rate)
    }

    # Regularizer factories, keyed by name, using the passed weight.
    supported_regularizers = {
        'N3': lambda: N3(weight=self.regularizer_weight),
        'N2': lambda: N2(weight=self.regularizer_weight)
    }

    # Instantiate only the chosen optimizer and regularizer; an
    # unsupported name still raises KeyError, exactly as before.
    self.optimizer = supported_optimizers[self.optimizer_name]()
    self.regularizer = supported_regularizers[self.regularizer_name]()
def __init__(self, schema):
    """Create an optimizer over *schema* with an empty optimization registry."""
    Optimizer.__init__(self, schema)
    # Each key maps to the list of optimizations recorded for it.
    self.optimizations = defaultdict(list)
def __init__(self, argv, reactor, objective): """ Constructor. """ # Initialize the Optimizer object. Optimizer.__init__(self, argv, reactor, objective)
def __init__(self, binary):
    """Initialize the aggregator over *binary* and convert every function it contains."""
    # Runtime log message kept verbatim (Chinese: "aggregator initialization").
    logging.info("聚合器初始化")
    # Initialize the base optimizer.
    Optimizer.__init__(self, binary)
    # Run the conversion pass over each discovered function.
    for fn in self.get_all_functions():
        self.__convert_function(fn)
def __init__(self, func):
    """Initialize the optimizer to operate on *func*.

    Delegates all setup to the base ``Optimizer``.
    """
    Optimizer.__init__(self, func)
def __init__(self, binary):
    """Initialize from *binary* and convert every discovered function."""
    Optimizer.__init__(self, binary)
    # Conversion pass over all functions in the binary.
    for fn in self.get_all_functions():
        self.__convert_function(fn)
def __init__(self, binary, is_construct=False):
    """Initialize from *binary* (optionally in construct mode) and convert every function."""
    Optimizer.__init__(self, binary, is_construct)
    # Conversion pass over all functions in the binary.
    for fn in self.get_all_functions():
        self.__convert_function(fn)
def __init__(self, model, cost, num_epochs=10, batch_size=2, lr=0.15):
    """Mini-batch optimizer: model, cost and epoch count go to the base class."""
    Optimizer.__init__(self, model, cost, num_epochs)
    self.batch_size = batch_size  # samples per gradient step
    self.lr = lr                  # learning rate