Example #1
    def __init__(self):
        PrepareData.__init__(self)

        # per-optimizer hyperparameters (adadelta, adagrad, adam, ftrl, momentum, rmsprop)
        self.adadelta_rho = 0.95
        self.opt_epsilon = 1.0
        self.adagrad_initial_accumulator_value = 0.1
        self.adam_beta1 = 0.9
        self.adam_beta2 = 0.999
        self.ftrl_learning_rate_power = -0.5
        self.ftrl_initial_accumulator_value = 0.1
        self.ftrl_l1 = 0.0
        self.ftrl_l2 = 0.0
        self.momentum = 0.9
        self.rmsprop_decay = 0.9
        self.rmsprop_momentum = 0.9

        # loss and learning-rate schedule
        self.label_smoothing = 0
        self.num_epochs_per_decay = 2.0
        self.end_learning_rate = 0.0001

        # training run settings: checkpoint interval, summaries, logging, output dir, batch size
        self.save_interval_secs = 60 * 60  # one hour
        self.save_summaries_secs = 60
        self.log_every_n_steps = 100
        self.train_dir = './logs'
        self.batch_size = 32

        # optimizer choice and learning-rate policy
        self.optimizer = 'rmsprop'
        self.learning_rate = 0.01
        self.learning_rate_decay_type = 'fixed'
        self.max_number_of_steps = None

        # fine-tuning from an existing checkpoint
        self.checkpoint_path = None
        self.checkpoint_exclude_scopes = None
        self.ignore_missing_vars = False

        self.config_training()
        return
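The class above only stores hyperparameter values; the training script is what turns them into an actual optimizer. A minimal sketch of that step is shown below, assuming TensorFlow 1.x; the build_optimizer helper name and the fallback to plain SGD are assumptions for illustration, not part of the original code.

import tensorflow as tf  # assumes TensorFlow 1.x (tf.train.* optimizers)

def build_optimizer(config, learning_rate):
    # Hypothetical helper: map config.optimizer to a tf.train optimizer.
    if config.optimizer == 'rmsprop':
        return tf.train.RMSPropOptimizer(
            learning_rate,
            decay=config.rmsprop_decay,
            momentum=config.rmsprop_momentum,
            epsilon=config.opt_epsilon)
    if config.optimizer == 'adam':
        return tf.train.AdamOptimizer(
            learning_rate,
            beta1=config.adam_beta1,
            beta2=config.adam_beta2,
            epsilon=config.opt_epsilon)
    if config.optimizer == 'momentum':
        return tf.train.MomentumOptimizer(learning_rate,
                                          momentum=config.momentum)
    # Fall back to plain SGD for any unrecognized name.
    return tf.train.GradientDescentOptimizer(learning_rate)

With the defaults above ('rmsprop', decay 0.9, momentum 0.9, epsilon 1.0) this would yield an RMSProp optimizer at learning rate 0.01.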
Example #2
    def __init__(self):
        PrepareData.__init__(self)

        self.batch_size = 8
        self.labels_offset = 0
        self.eval_image_size = None
        self.preprocessing_name = None
        self.model_name = 'vgg-ssd'

        self.num_preprocessing_threads = 4

        self.checkpoint_path = None
        self.eval_dir = None

        return
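This variant is an evaluation configuration: it keeps only the input-pipeline and checkpoint fields (batch_size, num_preprocessing_threads, eval_image_size, checkpoint_path, eval_dir). A small sketch of how the batching fields might be consumed in a TensorFlow 1.x queue-based pipeline is given below; decode_and_preprocess is a hypothetical callable standing in for whatever record decoding the project actually uses.

import tensorflow as tf  # assumes TensorFlow 1.x queue-based input pipelines

def build_eval_batch(config, filename_queue, decode_and_preprocess):
    # decode_and_preprocess(filename_queue) is assumed to return one
    # (image, label) pair; tf.train.batch then assembles batches using
    # config.num_preprocessing_threads parallel threads.
    image, label = decode_and_preprocess(filename_queue)
    return tf.train.batch(
        [image, label],
        batch_size=config.batch_size,
        num_threads=config.num_preprocessing_threads,
        capacity=5 * config.batch_size)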
Example #3
    def __init__(self):
        PrepareData.__init__(self)

        # learning-rate schedule
        self.num_epochs_per_decay = 8.0
        self.learning_rate_decay_type = 'exponential'
        self.end_learning_rate = 0.0001
        self.learning_rate = 0.1

        # optimizer choice
        self.optimizer = 'rmsprop'

        # per-optimizer hyperparameters (adadelta, adagrad, adam, ftrl, momentum)
        self.adadelta_rho = 0.95
        self.opt_epsilon = 1.0
        self.adagrad_initial_accumulator_value = 0.1
        self.adam_beta1 = 0.9
        self.adam_beta2 = 0.999
        self.ftrl_learning_rate_power = -0.5
        self.ftrl_initial_accumulator_value = 0.1
        self.ftrl_l1 = 0.0
        self.ftrl_l2 = 0.0
        self.momentum = 0.9

        self.rmsprop_decay = 0.9
        self.rmsprop_momentum = 0.9

        # training output and duration
        self.train_dir = '/tmp/tfmodel/'
        self.max_number_of_steps = None

        # fine-tuning from an existing checkpoint
        self.checkpoint_path = None
        self.checkpoint_exclude_scopes = None
        self.ignore_missing_vars = False

        self.batch_size = 1

        # checkpointing and summaries
        self.save_interval_secs = 60 * 60 * 1  # one hour
        self.save_summaries_secs = 30

        self.learning_rate_decay_factor = 0.5

        self.label_smoothing = 0
        return
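Unlike Example #1, this configuration selects an 'exponential' schedule that starts at 0.1 and is multiplied by learning_rate_decay_factor = 0.5 every 8 epochs. A minimal sketch of how such fields are commonly turned into a learning-rate tensor in TensorFlow 1.x follows; the build_learning_rate helper name is an assumption, not part of the original code.

import tensorflow as tf  # assumes TensorFlow 1.x decay helpers

def build_learning_rate(config, num_samples_per_epoch, global_step):
    # Decay once per `num_epochs_per_decay` epochs worth of training steps.
    decay_steps = int(num_samples_per_epoch / config.batch_size *
                      config.num_epochs_per_decay)
    if config.learning_rate_decay_type == 'exponential':
        return tf.train.exponential_decay(config.learning_rate,
                                          global_step,
                                          decay_steps,
                                          config.learning_rate_decay_factor,
                                          staircase=True)
    if config.learning_rate_decay_type == 'polynomial':
        return tf.train.polynomial_decay(config.learning_rate,
                                         global_step,
                                         decay_steps,
                                         config.end_learning_rate,
                                         power=1.0)
    # 'fixed': keep the learning rate constant.
    return tf.constant(config.learning_rate)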
Example #4
    def __init__(self):
        PrepareData.__init__(self)
        return
Example #5
    def __init__(self):
        PrepareData.__init__(self)
        self.batch_size = 32

        return