Example #1
File: hdl.py Project: npinto/hdl
    def learn(self,layer_start=0):

        # learn additional layers:
        # (note: mind indexes the *sliced* sequence, so the layer_params,
        # schedules, and model_sequence lookups below are only correct when
        # layer_start == 0; Example #4 shows a resume-aware variant)
        for mind, m in enumerate(self.model_sequence[layer_start:]):
            if not mind:
                l = learners.SGD(model=m,datasource=self.datasource,display_every=20000,batchsize=self.batchsize)
                l_firstlayer = l
            else:
                l = learners.SGD_layer(first_layer_learner=l_firstlayer,
                    model=m,
                    datasource=self.datasource,
                    display_every=20000,
                    batchsize=self.batchsize,
                    model_sequence=self.model_sequence[:mind],
                    layer_params=self.layer_params)

            whitenpatches = self.layer_params[mind]['whitenpatches']
            databatch = l.get_databatch(whitenpatches)

            l.model.learn_whitening(databatch)
            l.model.setup()

            sched_list = self.schedules[mind]

            for sdict in sched_list:
                if 'change_target' in sdict:
                    l.change_target(sdict['change_target'])
                if 'batchsize' in sdict:
                    l.batchsize *= sdict['batchsize']
                if 'iterations' in sdict:
                    l.learn(iterations=sdict['iterations'])
                else:
                    l.learn()

            from display import display_final
            display_final(self.model_sequence[mind])
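The schedule dicts drive the whole run: each entry may retarget the learner, rescale the batch size, and then train for a block of iterations. Below is a minimal, self-contained sketch of that loop using a stub in place of learners.SGD; the class, its attributes, and all values are illustrative, not hdl's actual API or defaults.

    class StubLearner:
        def __init__(self, batchsize=48):
            self.batchsize = batchsize
            self.target = 1.0

        def change_target(self, factor):
            # hypothetical stand-in for SGD.change_target
            self.target *= factor

        def learn(self, iterations=1000):
            print(f'learn: {iterations} iters, '
                  f'batchsize={self.batchsize}, target={self.target}')

    sched_list = [
        {'iterations': 2000},                        # warm up at the initial target
        {'change_target': 0.5, 'iterations': 1000},  # tighten the target
        {'batchsize': 2, 'iterations': 500},         # scale batchsize, as Example #1 does
    ]

    l = StubLearner()
    for sdict in sched_list:
        if 'change_target' in sdict:
            l.change_target(sdict['change_target'])
        if 'batchsize' in sdict:
            l.batchsize *= sdict['batchsize']
        if 'iterations' in sdict:
            l.learn(iterations=sdict['iterations'])
        else:
            l.learn()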
Example #2
    def fit(self,X,y=None):

        if not self._setup_complete:
            self.setup(X)

        databatch = self._learner.get_databatch(self.whitenpatches)
        self._learner.model.learn_whitening(databatch)
        self._learner.model.setup()

        sched_list = self.get_learning_schedule()

        for sdict in sched_list:
            if 'change_target' in sdict:
                self._learner.change_target(sdict['change_target'])
            if 'batchsize' in sdict:
                self._learner.batchsize = sdict['batchsize']
            if 'iterations' in sdict:
                self._learner.learn(iterations=sdict['iterations'])
            else:
                self._learner.learn()

        display_final(self._model)

        self._learning_complete = True

        return self
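For reference, a schedule here is just a list of dicts; get_learning_schedule presumably returns something shaped like the sketch below (the keys are the ones consumed above, the values are invented for illustration). Note that this variant assigns batchsize directly, whereas Example #1 multiplies the current value by it.

    # hypothetical output of get_learning_schedule(); values are illustrative
    sched_list = [
        {'iterations': 10000},                       # initial pass
        {'change_target': 0.9, 'iterations': 5000},  # adjust the learning target
        {'batchsize': 128, 'iterations': 5000},      # switch to a larger batch
    ]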
Example #3
    def fit(self, X, y=None):
        if not self._setup_complete:
            self.setup(X)

        databatch = self._learner.get_databatch(self.whitenpatches)
        self._learner.model.learn_whitening(databatch)

        display_final(self._model)

        self._learning_complete = True

        return self
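This variant stops after fitting the whitening transform; no model setup or schedule is run. hdl's learn_whitening is not shown in these snippets, but a generic PCA/ZCA whitening fit over a patch batch looks roughly like the sketch below (a reconstruction under that assumption, not the project's implementation).

    import numpy as np

    def fit_whitening(X, eps=1e-5):
        """X: (n_features, n_patches). Returns (whiten, dewhiten, mean)."""
        mean = X.mean(axis=1, keepdims=True)
        Xc = X - mean
        C = Xc @ Xc.T / Xc.shape[1]           # feature covariance
        evals, evecs = np.linalg.eigh(C)
        inv_sqrt = np.diag(1.0 / np.sqrt(evals + eps))
        whiten = evecs @ inv_sqrt @ evecs.T   # ZCA whitening matrix
        dewhiten = np.linalg.inv(whiten)
        return whiten, dewhiten, mean

    X = np.random.randn(16, 1000)             # 16-dim patches, 1000 samples
    W, Wi, mu = fit_whitening(X)
    Z = W @ (X - mu)                           # whitened patches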
Example #4
    def learn(self,layer_start=0):

        l_firstlayer = None

        # learn additional layers:
        for mind, m in enumerate(self.model_sequence):
            if not mind:
                if self.ipython_profile is None:
                    l = learners.SGD(model=m,
                        datasource=self.datasource,
                        display_every=self.display_every,
                        save_every=self.save_every,
                        batchsize=self.batchsize)
                else:
                    l = parallel_learners.SGD(model=m,
                        datasource=self.datasource,
                        display_every=self.display_every,
                        save_every=self.save_every,
                        batchsize=self.batchsize,
                        ipython_profile=self.ipython_profile)
                l_firstlayer = l
            else:
                if self.ipython_profile is None:
                    l = learners.SGD_layer(first_layer_learner=l_firstlayer,
                        model=m,
                        datasource=self.datasource,
                        display_every=self.display_every,
                        save_every=self.save_every,
                        batchsize=self.batchsize,
                        model_sequence=self.model_sequence[:mind],
                        layer_params=self.layer_params)
                else:
                    l = parallel_learners.SGD_layer(first_layer_learner=l_firstlayer,
                        model=m,
                        datasource=self.datasource,
                        display_every=self.display_every,
                        save_every=self.save_every,
                        batchsize=self.batchsize,
                        model_sequence=self.model_sequence[:mind],
                        layer_params=self.layer_params,
                        ipython_profile=self.ipython_profile)

            if layer_start > mind:
                self.model = models.HierarchicalModel(model_sequence=self.model_sequence[:mind+1],layer_params=self.layer_params[:mind+1])
                self.evaluate()
                continue

            whitenpatches = self.layer_params[mind]['whitenpatches']
            databatch = l.get_databatch(whitenpatches)

            l.model.learn_whitening(databatch)
            l.model.setup()

            # update self.model to use the sequence up to this layer
            self.model = models.HierarchicalModel(model_sequence=self.model_sequence[:mind+1],layer_params=self.layer_params[:mind+1])

            sched_list = self.schedules[mind]

            iter0 = self.iter
            self.evaluate()
            for sdict in sched_list:
                if 'change_target' in sdict:
                    l.change_target(sdict['change_target'])
                if 'batchsize' in sdict:
                    l.batchsize = sdict['batchsize']
                if 'iterations' in sdict:
                    l.learn(iterations=sdict['iterations'])
                else:
                    l.learn()

                self.iter = iter0 + l.iter
                self.evaluate()

            from display import display_final
            display_final(self.model_sequence[mind])
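The layer_start guard is what makes resuming work here: unlike Example #1, the loop walks the full model_sequence, so per-layer indices stay correct, and layers below layer_start are skipped while self.model is still rebuilt so evaluation covers the already-trained prefix. A toy trace of that control flow (names illustrative):

    model_sequence = ['layer0', 'layer1', 'layer2']
    layer_start = 1
    for mind, m in enumerate(model_sequence):
        if layer_start > mind:
            print(f'layer {mind}: rebuild hierarchical model and evaluate only')
            continue
        print(f'layer {mind}: learn whitening, setup, run schedule, display')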
Example #5
    def learn(self,layer_start=0):

        l_firstlayer = None

        # learn additional layers:
        for mind, m in enumerate(self.model_sequence):
            if not mind:
                if self.ipython_profile is None:
                    l = learners.SGD(model=m,
                        datasource=self.datasource,
                        display_every=self.display_every,
                        save_every=self.save_every,
                        batchsize=self.batchsize,
                        layer_params=self.layer_params,
                        eta_target_maxupdate=self.start_eta_target_maxupdates[mind],
                        eta=self.start_etas[mind],
                        **self.extra_learner_kargs)
                else:
                    l = parallel_learners.SGD(model=m,
                        datasource=self.datasource,
                        display_every=self.display_every,
                        save_every=self.save_every,
                        batchsize=self.batchsize,
                        layer_params=self.layer_params,
                        ipython_profile=self.ipython_profile,
                        eta_target_maxupdate=self.start_eta_target_maxupdates[mind],
                        eta=self.start_etas[mind],
                        **self.extra_learner_kargs)
                l_firstlayer = l
            else:
                if self.ipython_profile is None:
                    l = learners.SGD_layer(first_layer_learner=l_firstlayer,
                        model=m,
                        datasource=self.datasource,
                        display_every=self.display_every,
                        save_every=self.save_every,
                        batchsize=self.batchsize,
                        model_sequence=self.model_sequence[:mind],
                        layer_params=self.layer_params,
                        eta_target_maxupdate=self.start_eta_target_maxupdates[mind],
                        eta=self.start_etas[mind],
                        **self.extra_learner_kargs)
                else:
                    l = parallel_learners.SGD_layer(first_layer_learner=l_firstlayer,
                        model=m,
                        datasource=self.datasource,
                        display_every=self.display_every,
                        save_every=self.save_every,
                        batchsize=self.batchsize,
                        model_sequence=self.model_sequence[:mind],
                        layer_params=self.layer_params,
                        ipython_profile=self.ipython_profile,
                        eta_target_maxupdate=self.start_eta_target_maxupdates[mind],
                        eta=self.start_etas[mind],
                        **self.extra_learner_kargs)

            if layer_start > mind:
                self.model = models.HierarchicalModel(model_sequence=self.model_sequence[:mind+1],layer_params=self.layer_params[:mind+1])
                if self.evaluate_unlearned_layers:
                    self.evaluate(l)
                continue

            print('Begin learning layer', mind)

            print('Learn whitening...')
            whitenpatches = self.layer_params[mind]['whitenpatches']
            databatch = l.get_databatch(whitenpatches)

            l.model.learn_whitening(databatch)
            print('Done.')

            print('Setup model...')
            l.model.setup()
            print('Done.')

            # update self.model to use the sequence up to this layer
            self.model = models.HierarchicalModel(model_sequence=self.model_sequence[:mind+1],layer_params=self.layer_params[:mind+1])

            sched_list = self.schedules[mind]

            iter0 = self.iter
            self.evaluate(l)
            for sdict in sched_list:
                if 'change_target' in sdict:
                    l.change_target(sdict['change_target'])
                if 'batchsize' in sdict:
                    l.batchsize = sdict['batchsize']
                if 'center_basis_functions' in sdict and hasattr(l.model,'center_basis_functions'):
                    l.model.center_basis_functions = sdict['center_basis_functions']
                if 'iterations' in sdict:
                    l.learn(iterations=sdict['iterations'])
                else:
                    l.learn()

                self.iter = iter0 + l.iter
                self.evaluate(l)

            from display import display_final
            display_final(self.model_sequence[mind])
            l.model.save(save_txt=True)
            l.save()
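Relative to Example #4, this version threads per-layer learning-rate settings into each learner and saves the model and learner state at the end of every layer. The attributes it indexes by mind are presumably parallel lists with one entry per model in model_sequence; something like the following (values invented for illustration):

    # hypothetical per-layer configuration, indexed by mind above
    start_etas = [0.05, 0.01]                   # initial eta for each layer
    start_eta_target_maxupdates = [0.05, 0.02]  # eta adaptation target per layer
    extra_learner_kargs = {}                    # forwarded verbatim to every learner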