Example No. 1
    def init_unpickled(self):
        super(FullBatchLoader, self).init_unpickled()
        # Buffers for the full original dataset and its label mapping,
        # re-created here rather than restored from a pickle.
        self._original_data_ = memory.Array()
        self._original_labels_ = []
        self._mapped_original_labels_ = memory.Array()
        self.sources_["fullbatch_loader"] = {}
        self._global_size = None
        self._krn_const = numpy.zeros(2, dtype=Loader.LABEL_DTYPE)
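For context, a minimal sketch of the pattern this override follows, assuming a pickle-aware base class that calls init_unpickled() both from __init__ and again after unpickling, so transient attributes are rebuilt instead of restored. The base class and attribute names below are illustrative stand-ins, not the actual VELES ones.

    import pickle


    class PickleableSketch(object):
        """Illustrative stand-in for a pickle-aware base class."""

        def __init__(self, name):
            self.name = name  # regular attribute: stored in the pickle
            self.init_unpickled()

        def __getstate__(self):
            # Skip transient attributes, marked here by a trailing underscore.
            return {k: v for k, v in self.__dict__.items()
                    if not k.endswith("_")}

        def __setstate__(self, state):
            self.__dict__.update(state)
            self.init_unpickled()  # rebuild the transient attributes

        def init_unpickled(self):
            self._scratch_ = []  # hypothetical transient buffer

    restored = pickle.loads(pickle.dumps(PickleableSketch("demo")))
    assert restored.name == "demo"   # came back from the pickle
    assert restored._scratch_ == []  # re-created by init_unpickled()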
Example No. 2
    def __init__(self, workflow, **kwargs):
        super(GDRProp, self).__init__(workflow, **kwargs)
        # Learning-rate adaptation settings.
        self.initial_learning_rate = 0.01
        self.min_learning_rate = 10**-6
        self.max_learning_rate = 1
        self.increase = 1.05  # multiplicative step-up factor
        self.decrease = 0.80  # multiplicative step-down factor

        # Per-weight and per-bias learning rates.
        self.weight_lrs = memory.Array()
        self.bias_lrs = memory.Array()
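The update step itself is not shown above. Purely as an illustration of what constants like increase, decrease, min_learning_rate and max_learning_rate usually drive, here is a generic RProp-style adaptation rule; it is a sketch, not the GDRProp implementation.

    import numpy

    def adapt_learning_rates(lrs, grad, prev_grad,
                             increase=1.05, decrease=0.80,
                             min_lr=1e-6, max_lr=1.0):
        """Grow each per-weight rate while its gradient keeps the same sign,
        shrink it otherwise, and clip the result to [min_lr, max_lr]."""
        same_sign = grad * prev_grad > 0
        lrs = numpy.where(same_sign, lrs * increase, lrs * decrease)
        return numpy.clip(lrs, min_lr, max_lr)

    lrs = numpy.full(4, 0.01)
    grad = numpy.array([0.2, -0.1, 0.3, 0.0])
    prev_grad = numpy.array([0.1, 0.2, 0.3, -0.1])
    print(adapt_learning_rates(lrs, grad, prev_grad))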
Example No. 3
    def __init__(self, workflow, **kwargs):
        kwargs["view_group"] = "LOADER"
        self.last_minibatch = Bool()
        super(Loader, self).__init__(workflow, **kwargs)
        self.verify_interface(ILoader)

        self.prng = kwargs.get("prng", random_generator.get())

        # In testing mode the shuffle limit is forced to zero.
        if not self.testing:
            self.shuffle_limit = kwargs.get("shuffle_limit",
                                            numpy.iinfo(numpy.uint32).max)
        else:
            self.shuffle_limit = 0
        self._max_minibatch_size = kwargs.get("minibatch_size", 100)
        if self._max_minibatch_size < 1:
            raise ValueError("minibatch_size must be greater than zero")

        # One sample count and end offset per class defined in CLASS_NAME.
        self._class_lengths = [0] * len(CLASS_NAME)
        self._class_end_offsets = [0] * len(CLASS_NAME)
        self._has_labels = False

        self.epoch_ended = Bool()
        self.epoch_number = 0
        self.train_ended = Bool()
        self.test_ended = Bool()

        self.samples_served = 0
        self._global_offset = 0

        # Buffers for the minibatch currently being served.
        self.minibatch_class = 0
        self.minibatch_data = memory.Array(shallow_pickle=True)
        self.minibatch_indices = memory.Array(shallow_pickle=True)
        self.minibatch_labels = memory.Array(shallow_pickle=True)
        # Label bookkeeping: raw labels and their integer mapping.
        self._raw_minibatch_labels = []
        self._labels_mapping = {}
        self._reversed_labels_mapping = []
        self._samples_mapping = defaultdict(set)

        self.failed_minibatches = []
        self._total_failed = 0
        self._on_initialized = nothing
        self._unique_labels_count = 1  # "None" label

        self.shuffled_indices = memory.Array()
        self.normalization_type = kwargs.get("normalization_type", "none")
        self.normalization_parameters = kwargs.get("normalization_parameters",
                                                   {})
        self.train_ratio = kwargs.get("train_ratio", self.train_ratio)
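Loader is abstract (it verifies the ILoader interface), so it is used through a concrete subclass. A short sketch of the keyword arguments consumed by the __init__ above; the values are only examples and the subclass name is hypothetical.

    loader_kwargs = {
        "minibatch_size": 128,           # must be >= 1; defaults to 100
        "normalization_type": "none",    # defaults to "none"
        "normalization_parameters": {},  # defaults to {}
        "shuffle_limit": 0,              # defaults to uint32 max when not testing
        "train_ratio": 1.0,              # defaults to the existing train_ratio
    }
    # loader = SomeConcreteLoader(workflow, **loader_kwargs)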
Example No. 4
    def __init__(self, workflow, **kwargs):
        super(LoaderMSEMixin, self).__init__(workflow, **kwargs)
        self.class_targets = memory.Array()
        self._minibatch_targets = memory.Array(shallow_pickle=True)
        self._targets_shape = kwargs.get("targets_shape", tuple())
        # Target normalization falls back to the input normalization settings.
        self.target_normalization_type = kwargs.get(
            "target_normalization_type",
            kwargs.get("normalization_type", "none"))
        # A target normalization type that differs from the input one must
        # come with its own parameters.
        if "target_normalization_type" in kwargs and \
                self.target_normalization_type != self.normalization_type and \
                "target_normalization_parameters" not in kwargs:
            raise ValueError("You set target_normalization_type to %s, which "
                             "is different from normalization_type, but did "
                             "not set target_normalization_parameters." %
                             self.target_normalization_type)
        self.target_normalization_parameters = kwargs.get(
            "target_normalization_parameters",
            kwargs.get("normalization_parameters", {}))
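A short sketch of how the target-normalization keywords above interact; the type names are placeholders and the dicts only mirror the checks performed in this __init__.

    # Targets reuse the input normalization settings unless overridden.
    inherited_kwargs = {
        "normalization_type": "none",
        "normalization_parameters": {},
    }

    # Overriding the target normalization with a different type also requires
    # target_normalization_parameters, otherwise __init__ raises ValueError.
    overridden_kwargs = {
        "normalization_type": "none",
        "target_normalization_type": "other_type",  # placeholder name
        "target_normalization_parameters": {},
    }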
Example No. 5
    def init_unpickled(self):
        super(FullBatchLoaderMSEMixin, self).init_unpickled()
        self._original_targets_ = memory.Array()
        self._kernel_target_ = None
        self._global_size_target = None