def __init__(self, params=None):
    """Initialize the object from an explicit params dict.

    NOTE(review): a second, larger ``__init__`` is defined below at
    method indentation — confirm which of the two is actually live
    before removing either.

    Args:
        params: dict of model options. If None, a warning is printed
            and the object is left uninitialized (original behavior
            preserved — callers may rely on the non-raising path).
    """
    if params is None:
        # Original code only warned and did not raise; preserved.
        print("Please provide input params")
    else:
        self._params = params
        self.model_options = self._params
        # Delegate the remaining setup to the Preprocess base class.
        Preprocess.__init__(self, self.model_options)
    def __init__(self, Object=None, params=None):
        """Initialize a fresh LSTM model or copy-construct from another object.

        Args:
            Object: an existing (presumably LSTM) object to copy from.
                If None, a new object is built via ``Preprocess.__init__``.
            params: dict of overrides merged on top of the built-in
                ``default_params`` below.
        """
        default_params = {
            "num_hidden_layers": 2,
            "dim_proj": 128,            # word-embedding / LSTM hidden dimension
            "patience": 10,             # early-stopping patience
            "max_epochs": 5000,
            "dispFreq": 10,
            "decay_c": 0.,              # weight-decay coefficient
            "lrate": 0.0001,
            "n_words": 10000,           # vocabulary size
            "optimizer": "adadelta",
            "encoder": "lstm",
            "saveto": "lstm_model.npz",
            "validFreq": 370,
            "saveFreq": 1110,
            "maxlen": 100,              # max sequence length
            "batch_size": 16,
            "valid_batch_size": 64,
            "valid_portion": 0.05,
            "dataset": "imdb",
            "noise_std": 0.,
            "use_dropout": True,
            "reload_model": "",
            "text_col": 0,
            "dedupe": True,
            "label_col": 5,
            "train_max": 0.5,
            "train_size": 1524,
            "test_size": 1533,
            "data_directory": "../../../data/",
            # Alternate dataset: "Annotated_Comments_for_Crest White Strips 1.csv"
            "data_file": "Annotated_Comments_for_Always_Discreet_1.csv",
            "raw_rows": None,
            "class_type": "Sentiment",
            "correct_spelling": False,
            "unk_threshold": 0.8
        }

        # Attribute names presumably stripped before serialization —
        # TODO(review): confirm against where _del_keys is consumed.
        self._del_keys = ['_layers', 'f_grad_shared', 'f_grad', 'train_set',
                          'test_set', '_test_xx', '_trXX', '_teXX']

        # Merge caller-supplied overrides on top of the defaults.
        if params is not None:
            for key, value in params.items():
                try:
                    default_params[key] = value
                except Exception:  # e.g. unhashable key; report and continue
                    print("Could not add: " + str(key) + " --> " + str(value))
        self._params = default_params
        self._model_options = self._params # Originally in Load_LSTM_Params

        ### THIS OBJECT THING NEEDS TO BE SIMPLIFIED NOW THAT EVERYTHING IS INHERITED.
        if Object is None:
            # Fresh object: run the inherited preprocessing setup and
            # register the LSTM layer init/apply functions.
            Preprocess.__init__(self, self._model_options)
            self._layers = {'lstm': (self.param_init_lstm, self.lstm_layer)}
        else:
            # Copy constructor: duplicate data members from the given object.
            self.train_set = copy.deepcopy(Object.train_set)
            self.valid_set = copy.deepcopy(Object.valid_set)
            self.test_set = copy.deepcopy(Object.test_set)
            self._DICTIONARY = copy.deepcopy(Object._DICTIONARY)

            try:
                print("Assuming object is LSTM, copying...")
                # LSTM variables
                self._layers = copy.deepcopy(Object._layers)
                self._params = copy.deepcopy(Object._params)
                # NOTE(review): Theano shared variables may need recompiling
                # after a deepcopy — confirm _tparams survives the copy.
                self._tparams = copy.deepcopy(Object._tparams)
                self._model_options = copy.deepcopy(Object._model_options)
                self.optimizer = self._model_options['optimizer']
            except Exception:
                print("Couldn't copy LSTM Object, initializing a new object.")
                self._layers = {'lstm': (self.param_init_lstm, self.lstm_layer)}
                # NOTE(review): shared reference (no deepcopy) — mutations
                # would be visible on Object; preserved as-is from original.
                self._model_options = Object._model_options
                self._params = self._init_params(self._model_options)
                self._tparams = self._init_tparams(self._params)
                self.optimizer = self._model_options['optimizer']