def __init__(self, keras_model, worker_optimizer, loss, metrics=None,
             num_workers=2, batch_size=32, features_col="features",
             label_col="label", num_epoch=1, master_port=5000,
             class_weight=None, loss_weights=None):
    """Initialize a distributed trainer with class-weighted loss.

    # Arguments
        keras_model: Keras model to train.
        worker_optimizer: optimizer used by each worker.
        loss: loss function identifier passed to the parent trainer.
        metrics: list of metrics to track; defaults to ["accuracy"].
        num_workers: int, number of parallel workers.
        batch_size: int, mini-batch size used by each worker.
        features_col: string, name of the features column.
        label_col: string, name of the label column.
        num_epoch: int, number of passes over the training data.
        master_port: int, port the parameter server listens on.
        class_weight: dict mapping class index to loss weight; defaults to
            the original hard-coded imbalance weights
            {0: 0.50598991, 1: 42.23686434}.
        loss_weights: optional per-output loss weights.
    """
    # Use None sentinels instead of mutable defaults: a shared list/dict
    # default object would be silently mutated across every instance.
    if metrics is None:
        metrics = ["accuracy"]
    if class_weight is None:
        class_weight = {0: 0.50598991, 1: 42.23686434}
    super(DistributedTrainer, self).__init__(
        keras_model, loss, worker_optimizer, metrics, loss_weights)
    self.num_workers = num_workers
    self.batch_size = batch_size
    self.features_column = features_col
    self.label_column = label_col
    self.num_epoch = num_epoch
    # Parameter server is created lazily; None until training starts.
    self.parameter_server = None
    self.parameter_server_thread = None
    self.master_host = determine_host_address()
    self.master_port = master_port
    self.class_weight = class_weight
    self.learning_rate = 1.0
def __init__(self, keras_model, worker_optimizer, loss, num_workers=2,
             batch_size=32, features_col="features", label_col="label",
             num_epoch=1, master_port=5000):
    """Initialize a distributed trainer.

    # Arguments
        keras_model: Keras model to train.
        worker_optimizer: optimizer used by each worker.
        loss: loss function identifier passed to the parent trainer.
        num_workers: int, number of parallel workers.
        batch_size: int, mini-batch size used by each worker.
        features_col: string, name of the features column.
        label_col: string, name of the label column.
        num_epoch: int, number of passes over the training data.
        master_port: int, port the parameter server listens on
            (default 5000, matching the previous hard-coded value).
    """
    super(DistributedTrainer, self).__init__(keras_model, loss, worker_optimizer)
    self.num_workers = num_workers
    self.batch_size = batch_size
    self.features_column = features_col
    self.label_column = label_col
    self.num_epoch = num_epoch
    # Parameter server is created lazily; None until training starts.
    self.parameter_server = None
    self.parameter_server_thread = None
    self.master_host = determine_host_address()
    # Previously hard-coded to 5000; now a backward-compatible parameter,
    # consistent with the sibling constructors that already accept it.
    self.master_port = master_port
def __init__(self, keras_model, worker_optimizer, loss, metrics=None,
             num_workers=2, batch_size=32, features_col="features",
             label_col="label", num_epoch=1, master_port=5000,
             loss_weights=None):
    """Initialize a distributed trainer with configurable metrics.

    # Arguments
        keras_model: Keras model to train.
        worker_optimizer: optimizer used by each worker.
        loss: loss function identifier passed to the parent trainer.
        metrics: list of metrics to track; defaults to ["accuracy"].
        num_workers: int, number of parallel workers.
        batch_size: int, mini-batch size used by each worker.
        features_col: string, name of the features column.
        label_col: string, name of the label column.
        num_epoch: int, number of passes over the training data.
        master_port: int, port the parameter server listens on.
        loss_weights: optional per-output loss weights.
    """
    # Use a None sentinel instead of a mutable list default: the shared
    # default object would be mutated across every instance.
    if metrics is None:
        metrics = ["accuracy"]
    super(DistributedTrainer, self).__init__(
        keras_model, loss, worker_optimizer, metrics, loss_weights)
    self.num_workers = num_workers
    self.batch_size = batch_size
    self.features_column = features_col
    self.label_column = label_col
    self.num_epoch = num_epoch
    # Parameter server is created lazily; None until training starts.
    self.parameter_server = None
    self.parameter_server_thread = None
    self.master_host = determine_host_address()
    self.master_port = master_port
    self.learning_rate = 1.0
def determine_new_master(self):
    """Promote the current host: point the master address at this machine."""
    current_address = determine_host_address()
    self.master_host = current_address