Example #1
    def __init__(self, **kwargs):
        self.macro_batched = False
        self.__dict__.update(kwargs)  # absorb keyword arguments as attributes

        # default backend_type if the caller did not supply one, then coerce
        opt_param(self, ['backend_type'], 'np.float32')
        self.backend_type = ensure_dtype(self.backend_type)  # string to dtype
        logger.info("Setting dtype to %s", self.backend_type)
Example #2
    def __init__(self, **kwargs):
        self.initialized = False
        self.__dict__.update(kwargs)  # absorb keyword arguments as attributes

        opt_param(self, ['name'], 'layer')

        # per-buffer dtypes start as float32; backend_type overrides them below
        opt_param(self, ['pre_act_dtype', 'output_dtype', 'deltas_dtype',
                         'weight_dtype', 'updates_dtype'], np.float32)
        opt_param(self, ['prev_layer'])
        opt_param(self, ['activation'], Linear())

        opt_param(self, ['is_local', 'is_data', 'is_cost'], False)
        opt_param(self, ['is_random'], False)

        opt_param(self, ['skip_act', 'has_params'], False)
        opt_param(self, ['prev_names'], [])

        opt_param(self, ['backend_type'], 'np.float32')
        self.backend_type = ensure_dtype(self.backend_type)  # string to dtype
        logger.info("Setting layer dtype to %s", self.backend_type)
        # force every buffer dtype to match the resolved backend dtype
        for some_type in ['pre_act_dtype', 'output_dtype', 'deltas_dtype',
                          'weight_dtype', 'updates_dtype']:
            setattr(self, some_type, self.backend_type)
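Because self.__dict__.update(kwargs) runs before any opt_param call, explicit constructor arguments always win over the defaults. A hypothetical usage sketch (assuming this __init__ belongs to a Layer base class, as in neon):

layer = Layer(name='h1', backend_type='np.float64')
assert layer.name == 'h1'                 # explicit kwarg kept
assert layer.is_local is False            # opt_param default applied
assert layer.weight_dtype == np.float64   # forced to the resolved backend dtype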
Example #3
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)  # absorb keyword arguments as attributes
        opt_param(self, ['outputbuf', 'temp'], None)
        opt_param(self, ['scale'], 1.0)

        opt_param(self, ['backend_type'], np.float32)
        self.temp_dtype = ensure_dtype(self.backend_type)  # string or dtype to dtype
        logger.info("Setting dtype to %s", self.backend_type)
Example #4
File: rnn.py Project: ypkang/neon
    def fit(self, dataset):
        error = self.backend.empty((1, 1))
        self.print_layers()
        self.data_layer.init_dataset(dataset)
        self.data_layer.use_set('train')
        # numerical gradient checking is only meaningful in double precision
        if (self.num_grad_params is not None and
                ensure_dtype(self.backend_type) == np.float64):
            self.grad_checker(numgrad=self.num_grad_params)
        if self.make_plots:
            viz = VisualizeRNN()
            errorlist = []
            suberrorlist = []
        logger.info('commencing model fitting')
        suberror = self.backend.zeros((1, 1))
        while self.epochs_complete < self.num_epochs:
            self.backend.begin(Block.epoch, self.epochs_complete)
            error.fill(0.0)
            mb_id = 1
            self.data_layer.reset_counter()
            dlnb = self.data_layer.num_batches
            while self.data_layer.has_more_data():
                self.backend.begin(Block.minibatch, mb_id)
                self.reset(mb_id)
                self.backend.begin(Block.fprop, mb_id)
                # the -1 sentinel keeps debug tracing off during normal runs
                self.fprop(debug=(mb_id == -1))
                self.backend.end(Block.fprop, mb_id)
                self.backend.begin(Block.bprop, mb_id)
                self.bprop(debug=(mb_id == -1))
                self.backend.end(Block.bprop, mb_id)
                self.backend.begin(Block.update, mb_id)
                self.update(self.epochs_complete)
                self.backend.end(Block.update, mb_id)

                # read the cost off the last unrolled output of the
                # classification layer and accumulate it into the epoch error
                self.cost_layer.cost.set_outputbuf(
                    self.class_layer.output_list[-1])
                suberror = self.cost_layer.get_cost()
                if self.make_plots:
                    suberrorlist.append(float(suberror.asnumpyarray()))
                self.backend.add(error, suberror, error)
                if self.step_print > 0 and mb_id % self.step_print == 0:
                    logger.info('%d:%d logloss=%0.5f', self.epochs_complete,
                                mb_id // self.step_print,
                                float(error.asnumpyarray()) / dlnb)
                self.backend.end(Block.minibatch, mb_id)
                mb_id += 1
            self.epochs_complete += 1
            if self.make_plots:
                errorlist.append(float(error.asnumpyarray()) / dlnb)
            logger.info('epoch: %d, total training error: %0.5f',
                        self.epochs_complete,
                        float(error.asnumpyarray()) / dlnb)
            self.backend.end(Block.epoch, self.epochs_complete - 1)
            self.save_snapshot()
            if self.make_plots:
                self.plot_layers(viz, suberrorlist, errorlist)

        self.data_layer.cleanup()
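The paired backend.begin/backend.end calls bracket the epoch, minibatch, fprop, bprop, and update scopes so a backend can hook instrumentation onto them. A small stub showing the pairing contract (the Block marker names match the calls above; the timing mixin itself is a hypothetical illustration, not neon's implementation):

import time


class Block(object):
    # marker names used by the training loop above
    epoch = 'epoch'
    minibatch = 'minibatch'
    fprop = 'fprop'
    bprop = 'bprop'
    update = 'update'


class TimingMixin(object):
    # hypothetical backend mixin: accumulate wall-clock time per block type
    def __init__(self):
        self._starts = {}
        self.totals = {}

    def begin(self, block, ident):
        self._starts[(block, ident)] = time.time()

    def end(self, block, ident):
        elapsed = time.time() - self._starts.pop((block, ident))
        self.totals[block] = self.totals.get(block, 0.0) + elapsed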