Example #1
0
    def parse_config_common(self, arguments):
        """Populate common training options from the *arguments* dict.

        Each recognized key is cast to its expected type and stored as an
        attribute on ``self``; absent keys leave the corresponding attribute
        (and any default it may hold) untouched.  Values are assumed to be
        strings, as parsed from a command line or config file.
        """
        # batch size, momentum, learning rate and regularization weights.
        # NOTE: dict.has_key() was removed in Python 3; use `in` instead.
        if 'batch_size' in arguments:
            self.batch_size = int(arguments['batch_size'])
        if 'momentum' in arguments:
            self.momentum = float(arguments['momentum'])
        if 'lrate' in arguments:
            self.lrate = parse_lrate(arguments['lrate'])
        if 'l1_reg' in arguments:
            self.l1_reg = float(arguments['l1_reg'])
        if 'l2_reg' in arguments:
            self.l2_reg = float(arguments['l2_reg'])
        if 'max_col_norm' in arguments:
            self.max_col_norm = float(arguments['max_col_norm'])

        # activation function; 'maxout:<pool_size>' selects maxout with the
        # given integer pool size
        if 'activation' in arguments:
            self.activation_text = arguments['activation']
            self.activation = parse_activation(arguments['activation'])
            if arguments['activation'].startswith('maxout'):
                self.do_maxout = True
                self.pool_size = int(arguments['activation'].replace('maxout:', ''))
                self.activation_text = 'maxout'

        # dropout: input-feature dropout is only honored when hidden-layer
        # dropout is configured as well; you cannot apply dropout only to the
        # input features
        if 'dropout_factor' in arguments:
            self.do_dropout = True
            factors = arguments['dropout_factor'].split(',')
            self.dropout_factor = [float(factor) for factor in factors]
            if 'input_dropout_factor' in arguments:
                self.input_dropout_factor = float(arguments['input_dropout_factor'])

        # optional regression mode: the mere presence of the key enables it
        if 'regression' in arguments:
            self.do_regression = True

        # output file locations
        if 'cfg_output_file' in arguments:
            self.cfg_output_file = arguments['cfg_output_file']
        if 'param_output_file' in arguments:
            self.param_output_file = arguments['param_output_file']
        if 'kaldi_output_file' in arguments:
            self.kaldi_output_file = arguments['kaldi_output_file']

        # checkpoint frequency (interpretation is up to the caller)
        if 'model_save_step' in arguments:
            self.model_save_step = int(arguments['model_save_step'])

        # comma-separated indices of layers to freeze during training
        if 'non_updated_layers' in arguments:
            layers = arguments['non_updated_layers'].split(",")
            self.non_updated_layers = [int(layer) for layer in layers]
Example #2
0
    def parse_config_common(self, arguments):
        """Populate common training options from the *arguments* dict.

        Each recognized key is cast to its expected type and stored as an
        attribute on ``self``; absent keys leave the corresponding attribute
        (and any default it may hold) untouched.  Values are assumed to be
        strings, as parsed from a command line or config file.
        """
        # batch size, momentum, learning rate and regularization weights.
        # NOTE: dict.has_key() was removed in Python 3; use `in` instead.
        if 'batch_size' in arguments:
            self.batch_size = int(arguments['batch_size'])
        if 'momentum' in arguments:
            self.momentum = float(arguments['momentum'])
        if 'lrate' in arguments:
            self.lrate = parse_lrate(arguments['lrate'])
        if 'l1_reg' in arguments:
            self.l1_reg = float(arguments['l1_reg'])
        if 'l2_reg' in arguments:
            self.l2_reg = float(arguments['l2_reg'])
        if 'max_col_norm' in arguments:
            self.max_col_norm = float(arguments['max_col_norm'])

        # activation function; 'maxout:<pool_size>' selects maxout with the
        # given integer pool size
        if 'activation' in arguments:
            self.activation_text = arguments['activation']
            self.activation = parse_activation(arguments['activation'])
            if arguments['activation'].startswith('maxout'):
                self.do_maxout = True
                self.pool_size = int(arguments['activation'].replace(
                    'maxout:', ''))
                self.activation_text = 'maxout'

        # dropout: input-feature dropout is only honored when hidden-layer
        # dropout is configured as well; you cannot apply dropout only to the
        # input features
        if 'dropout_factor' in arguments:
            self.do_dropout = True
            factors = arguments['dropout_factor'].split(',')
            self.dropout_factor = [float(factor) for factor in factors]
            if 'input_dropout_factor' in arguments:
                self.input_dropout_factor = float(
                    arguments['input_dropout_factor'])

        # output file locations
        if 'cfg_output_file' in arguments:
            self.cfg_output_file = arguments['cfg_output_file']
        if 'param_output_file' in arguments:
            self.param_output_file = arguments['param_output_file']
        if 'kaldi_output_file' in arguments:
            self.kaldi_output_file = arguments['kaldi_output_file']

        # checkpoint frequency (interpretation is up to the caller)
        if 'model_save_step' in arguments:
            self.model_save_step = int(arguments['model_save_step'])

        # comma-separated indices of layers to freeze during training
        if 'non_updated_layers' in arguments:
            layers = arguments['non_updated_layers'].split(",")
            self.non_updated_layers = [int(layer) for layer in layers]
Example #3
0
    def parse_config_common(self, arguments):
        """Populate common training options from the *arguments* dict.

        Each recognized key is cast to its expected type and stored as an
        attribute on ``self``; absent keys leave the corresponding attribute
        (and any default it may hold) untouched.  Values are assumed to be
        strings, as parsed from a command line or config file.
        """
        # batch size, momentum and learning rate.
        # NOTE: dict.has_key() was removed in Python 3; use `in` instead.
        if 'batch_size' in arguments:
            self.batch_size = int(arguments['batch_size'])
        if 'momentum' in arguments:
            self.momentum = float(arguments['momentum'])
        if 'lrate' in arguments:
            self.lrate = parse_lrate(arguments['lrate'])

        # activation function; 'maxout:<pool_size>' selects maxout with the
        # given integer pool size
        if 'activation' in arguments:
            self.activation_text = arguments['activation']
            self.activation = parse_activation(arguments['activation'])
            if arguments['activation'].startswith('maxout'):
                self.do_maxout = True
                self.pool_size = int(arguments['activation'].replace('maxout:', ''))
                self.activation_text = 'maxout'

        # dropout: input-feature dropout is only honored when hidden-layer
        # dropout is configured as well; you cannot apply dropout only to the
        # input features
        if 'dropout_factor' in arguments:
            self.do_dropout = True
            factors = arguments['dropout_factor'].split(',')
            self.dropout_factor = [float(factor) for factor in factors]
            if 'input_dropout_factor' in arguments:
                self.input_dropout_factor = float(arguments['input_dropout_factor'])

        # output file locations
        if 'cfg_output_file' in arguments:
            self.cfg_output_file = arguments['cfg_output_file']
        if 'param_output_file' in arguments:
            self.param_output_file = arguments['param_output_file']
        if 'kaldi_output_file' in arguments:
            self.kaldi_output_file = arguments['kaldi_output_file']

        # checkpoint frequency (interpretation is up to the caller)
        if 'model_save_step' in arguments:
            self.model_save_step = int(arguments['model_save_step'])