def load_data(self):
    """Generate a synthetic data set: random samples, index labels.

    Classes 0 and 1 are left empty; class 2 receives every sample
    (presumably the train class -- confirm the class-index convention).
    """
    test_length = max(self.BATCH, 1000)
    self.class_lengths[0] = 0
    self.class_lengths[1] = 0
    self.class_lengths[2] = test_length
    # Allocate original_data for 224x224 RGB samples, then randomize it.
    self.create_originals((224, 224, 3))
    prng.get().fill(self.original_data.mem)
    # Label every sample with its own position index.
    sample_count = self.original_data.shape[0]
    self.original_labels[:] = numpy.arange(sample_count, dtype=numpy.int32)
def load_data(self):
    """Fills the data set with random samples labeled by their index."""
    # Only class 2 gets samples; classes 0 and 1 stay empty
    # (presumably test/validation -- TODO confirm index semantics).
    self.class_lengths[0] = 0
    self.class_lengths[1] = 0
    self.class_lengths[2] = max(self.BATCH, 1000)
    # Allocate the sample buffer with a 224x224x3 per-sample shape.
    self.create_originals((224, 224, 3))
    # Random contents; each label equals the sample's own index.
    prng.get().fill(self.original_data.mem)
    self.original_labels[:] = numpy.arange(
        self.original_data.shape[0], dtype=numpy.int32)
def __init__(self, workflow, **kwargs):
    """Builds the CD-k Gibbs-sampling loop for RBM gradient estimation.

    Units form a cycle: mem_cpy -> repeater -> decision -> bino_h ->
    make_v -> bino_v -> make_h -> repeater, terminated after ``cd_k``
    iterations by the IterationCounter.
    """
    super(GradientRBM, self).__init__(workflow, **kwargs)
    self.stddev = kwargs["stddev"]
    self.batch_size = -1
    # Copy the input so the sampling loop can overwrite it in place.
    self.mem_cpy = MemCpy(self)
    self.mem_cpy.link_from(self.start_point)
    self.repeater = Repeater(self)
    self.repeater.link_from(self.mem_cpy)
    # Stops the loop after cd_k iterations.
    self.decision = IterationCounter(
        self, max_iterations=kwargs["cd_k"])
    self.decision.link_from(self.repeater)
    self.bino_h = BinarizationGradH(
        self, rand=kwargs.get("rand_h", prng.get()))
    self.bino_h.link_attrs(self.mem_cpy, ("input", "output"))
    self.bino_h.link_from(self.decision)
    self.bino_h.gate_block = self.decision.complete
    # Visible reconstruction shares the weights, transposed.
    self.make_v = All2AllSigmoidV(
        self, weights_stddev=self.stddev, weights_transposed=True,
        output_sample_shape=kwargs["v_size"])
    self.make_v.link_from(self.bino_h)
    self.make_v.link_attrs(self.bino_h, ("input", "output"))
    self.bino_v = BinarizationGradV(
        self, rand=kwargs.get("rand_v", prng.get()))
    self.bino_v.link_attrs(self.make_v, ("input", "output"))
    self.bino_v.link_from(self.make_v)
    self.make_h = All2AllSigmoidH(
        self, weights_stddev=self.stddev,
        output_sample_shape=kwargs["h_size"])
    self.make_h.link_attrs(self.bino_v, ("input", "output"))
    # make_h writes back into the buffer produced by mem_cpy,
    # closing the in-place sampling chain.
    self.make_h.output = self.mem_cpy.output
    self.make_h.link_from(self.bino_v)
    self.repeater.link_from(self.make_h)
    self.end_point.link_from(self.decision)
    self.end_point.gate_block = ~self.decision.complete
    # NOTE(review): exact duplicate of the gate_block assignment above.
    self.bino_h.gate_block = self.decision.complete
    self.mem_cpy.link_attrs(self, "input")
    self.bino_h.link_attrs(self, "batch_size")
    self.bino_v.link_attrs(self, "batch_size")
    self.make_v.link_attrs(self, "weights")
    self.make_v.link_attrs(self, ("bias", "vbias"))
    self.make_h.link_attrs(self, "weights")
    self.make_h.link_attrs(self, ("bias", "hbias"))
    self.link_attrs(self.make_h, "output")
    self.link_attrs(self.bino_v, ("v1", "output"))
    self.link_attrs(self.make_h, ("h1", "output"))
    self.demand("input", "weights", "hbias", "vbias", "batch_size")
def __init__(self, workflow, **kwargs):
    """Wires the contrastive-divergence (CD-k) sampling loop of an RBM.

    The repeater/decision pair re-runs bino_h -> make_v -> bino_v ->
    make_h until ``cd_k`` iterations have completed.
    """
    super(GradientRBM, self).__init__(workflow, **kwargs)
    self.stddev = kwargs["stddev"]
    self.batch_size = -1
    # Snapshot the input so the loop can mutate its working copy.
    self.mem_cpy = MemCpy(self)
    self.mem_cpy.link_from(self.start_point)
    self.repeater = Repeater(self)
    self.repeater.link_from(self.mem_cpy)
    self.decision = IterationCounter(
        self, max_iterations=kwargs["cd_k"])
    self.decision.link_from(self.repeater)
    self.bino_h = BinarizationGradH(
        self, rand=kwargs.get("rand_h", prng.get()))
    self.bino_h.link_attrs(self.mem_cpy, ("input", "output"))
    self.bino_h.link_from(self.decision)
    self.bino_h.gate_block = self.decision.complete
    # Transposed shared weights reconstruct the visible layer.
    self.make_v = All2AllSigmoidV(
        self, weights_stddev=self.stddev, weights_transposed=True,
        output_sample_shape=kwargs["v_size"])
    self.make_v.link_from(self.bino_h)
    self.make_v.link_attrs(self.bino_h, ("input", "output"))
    self.bino_v = BinarizationGradV(
        self, rand=kwargs.get("rand_v", prng.get()))
    self.bino_v.link_attrs(self.make_v, ("input", "output"))
    self.bino_v.link_from(self.make_v)
    self.make_h = All2AllSigmoidH(
        self, weights_stddev=self.stddev,
        output_sample_shape=kwargs["h_size"])
    self.make_h.link_attrs(self.bino_v, ("input", "output"))
    # Hidden output is aliased to mem_cpy's buffer: each pass feeds
    # the next iteration of the loop.
    self.make_h.output = self.mem_cpy.output
    self.make_h.link_from(self.bino_v)
    self.repeater.link_from(self.make_h)
    self.end_point.link_from(self.decision)
    self.end_point.gate_block = ~self.decision.complete
    # NOTE(review): gate_block on bino_h was already set above; this
    # repeated assignment appears redundant.
    self.bino_h.gate_block = self.decision.complete
    self.mem_cpy.link_attrs(self, "input")
    self.bino_h.link_attrs(self, "batch_size")
    self.bino_v.link_attrs(self, "batch_size")
    self.make_v.link_attrs(self, "weights")
    self.make_v.link_attrs(self, ("bias", "vbias"))
    self.make_h.link_attrs(self, "weights")
    self.make_h.link_attrs(self, ("bias", "hbias"))
    self.link_attrs(self.make_h, "output")
    self.link_attrs(self.bino_v, ("v1", "output"))
    self.link_attrs(self.make_h, ("h1", "output"))
    self.demand("input", "weights", "hbias", "vbias", "batch_size")
def __init__(self, workflow, **kwargs):
    """Chains binarization -> sigmoid reconstruction -> MSE evaluation."""
    super(EvaluatorRBM, self).__init__(workflow, **kwargs)
    self.run_is_blocking = True
    self.binarization = BinarizationEval(self, rand=kwargs.get(
        "rand", prng.get()))
    self.binarization.link_from(self.start_point)
    # Reconstructs visibles through foreign (shared) transposed weights.
    self.rec = All2AllSigmoidWithForeignWeights(
        self, output_sample_shape=kwargs["bias_shape"],
        weights_transposed=True)
    self.rec.link_from(self.binarization)
    self.rec.link_attrs(self.binarization, ("input", "output"))
    self.mse = EvaluatorMSE(self, root=False, mean=False)
    self.mse.link_from(self.rec)
    self.mse.link_attrs(self.rec, "output")
    # NOTE(review): ("output", "output") links the same pair as the
    # previous line; one of the two calls looks redundant.
    self.mse.link_attrs(self.rec, ("output", "output"))
    # MSE is computed on raw values -- no normalization.
    self.mse.normalizer = NoneNormalizer()
    self.end_point.link_from(self.mse)
    self.binarization.link_attrs(self, "input", "batch_size")
    self.rec.link_attrs(self, "weights")
    self.mse.link_attrs(self, "target", "batch_size")
    self.link_attrs(self.rec, ("vbias", "bias"))
    self.demand("input", "weights", "target")
def __init__(self, workflow, **kwargs):
    """Sets up a genetic search over Range-wrapped config parameters.

    Raises:
        ValueError: if the configuration contains no
            veles.genetics.Range instances to tune.
    """
    kwargs["view_group"] = kwargs.get("view_group", "EVALUATOR")
    super(GeneticsOptimizer, self).__init__(workflow, **kwargs)
    self._model_ = kwargs["model"]
    # Deep copy so tuning never mutates the caller's / global config.
    self._config = copy.deepcopy(kwargs.get("config", root))
    if "config" not in kwargs:
        del self.config.common
    self.plotters_are_disabled = kwargs.get(
        "plotters_are_disabled", root.common.genetics.disable.plotting)
    self._tuneables = []
    # Collect every Range instance found anywhere in the config tree.
    process_config(self.config, Range, self._add_tuneable)
    if len(self.tuneables) == 0:
        raise ValueError(
            "There are no tunable parameters in the supplied configuration"
            " %s. Wrap at least one into veles.genetics.Range class." %
            self.config.__path__)
    self._chromosome_index = 0
    self.generation_changed = Bool()
    if self.is_slave:
        # Slaves only evaluate chromosomes; no population of their own.
        self.complete = Bool()
        return
    self._population = ConfigPopulation(
        lambda *a, **k: ConfigChromosome(self, *a, **k),
        len(self.tuneables),
        [x.min_value for x in self.tuneables],
        [x.max_value for x in self.tuneables], kwargs["size"],
        rand=kwargs.get("rand", prng.get()),
        max_generations=kwargs.get("generations"))
    self.population.on_generation_changed_callback = \
        self._set_generation_changed
    self._best_config = ""  # actual type is veles.config.Config
    # The search completes once the population stops improving.
    self.complete = ~self.population.improved
def __init__(self, workflow, **kwargs):
    """Initializes the genetic optimizer over tunable config Ranges."""
    kwargs["view_group"] = kwargs.get("view_group", "EVALUATOR")
    super(GeneticsOptimizer, self).__init__(workflow, **kwargs)
    self._model_ = kwargs["model"]
    # Operate on a private copy of the configuration tree.
    self._config = copy.deepcopy(kwargs.get("config", root))
    if "config" not in kwargs:
        del self.config.common
    self.plotters_are_disabled = kwargs.get(
        "plotters_are_disabled", root.common.genetics.disable.plotting)
    self._tuneables = []
    # Discover all Range markers; each becomes one gene to optimize.
    process_config(self.config, Range, self._add_tuneable)
    if len(self.tuneables) == 0:
        raise ValueError(
            "There are no tunable parameters in the supplied configuration"
            " %s. Wrap at least one into veles.genetics.Range class." %
            self.config.__path__)
    self._chromosome_index = 0
    self.generation_changed = Bool()
    if self.is_slave:
        # Slave nodes just evaluate; the master owns the population.
        self.complete = Bool()
        return
    self._population = ConfigPopulation(
        lambda *a, **k: ConfigChromosome(self, *a, **k),
        len(self.tuneables),
        [x.min_value for x in self.tuneables],
        [x.max_value for x in self.tuneables], kwargs["size"],
        rand=kwargs.get("rand", prng.get()),
        max_generations=kwargs.get("generations"))
    self.population.on_generation_changed_callback = \
        self._set_generation_changed
    self._best_config = ""  # actual type is veles.config.Config
    # Completion tracks the population's "improved" flag (negated).
    self.complete = ~self.population.improved
def __init__(self, population, binary=None, numeric=None, size=None,
             rand=None):
    """Constructs the chromosome, either randomly or from given genes.

    Parameters:
        population: the owning Population; supplies min/max gene values,
            accuracy and gray codes.
        binary: pre-made binary (string) representation of the genes.
        numeric: pre-made list of numeric genes.
        size: number of genes to generate randomly; mutually exclusive
            with binary/numeric.
        rand: random generator; defaults to prng.get().
    """
    super(Chromosome, self).__init__()
    self.verify_interface(IChromosome)
    min_values = population.optimization.min_values
    max_values = population.optimization.max_values
    # Stored as a multiplier: 1/accuracy discrete steps per unit.
    accuracy = 1.0 / population.optimization.accuracy
    codes = population.codes
    self.rand = rand or prng.get()
    self.optimization = InlineObject()
    self.optimization.choice = "betw"
    self.optimization.code = "float"
    self.min_values = min_values
    self.max_values = max_values
    assert len(self.min_values) == len(self.max_values)
    if size is not None:
        # Random construction path.
        assert size > 0
        self.size = size
        self.binary = ""
        self.numeric = []
        for j in range(size):
            if self.optimization.choice == "or":
                # Either-or gene: pick one of the two boundary values.
                rand = self.rand.choice([min_values[j], max_values[j]])
                self.numeric.append(rand)
            elif isinstance(min_values[j], float) or \
                    isinstance(max_values[j], float):
                # Float gene: draw on the discretized (accuracy) grid.
                rand = self.rand.randint(int(min_values[j] * accuracy),
                                         int(max_values[j] * accuracy) + 1)
                self.numeric.append(rand / accuracy)
            else:
                # Integer gene: inclusive range draw.
                rand = self.rand.randint(min_values[j], max_values[j] + 1)
                self.numeric.append(rand)
            rand = int(rand * accuracy)
            if self.optimization.code == "gray":
                # Sign bit followed by the gray code of the value.
                if rand > 0:
                    self.binary += "1" + codes[rand]
                else:
                    self.binary += "0" + codes[rand]
    else:
        # Construction from pre-made genes.
        self.numeric = numeric
        self.numeric_correct()
        self.binary = binary
        self.size = len(numeric)
    # Fitness is computed lazily, elsewhere.
    self.fitness = None
def _seed_random(self, rndvals):
    """Seeds the numbered PRNGs from a comma-separated spec string.

    Each element of ``rndvals`` is either a hex string (used directly
    as seed bytes) or ``fname[:count[:dtype]]`` naming a file to read
    seed samples from; ``-`` reuses the generator's last stored seed.
    Generator ``i + 1`` is seeded from element ``i``.  Exits the
    process on unrecoverable errors.
    """
    self.debug("Seeding with %s", rndvals)
    rndvals_split = rndvals.split(',')
    seeds = []
    # enumerate() replaces the original zip(..., range(len(...))) idiom.
    for index, rndval in enumerate(rndvals_split):
        # First, try to interpret the value as a hex-encoded seed.
        try:
            binvle = binascii.unhexlify(rndval)
            seed = numpy.frombuffer(binvle, dtype=numpy.uint8)
            prng.get(index + 1).seed(seed, dtype=numpy.uint8)
            seeds.append(seed)
            continue
        except (binascii.Error, TypeError):
            pass
        # Otherwise parse "fname[:count[:dtype]]".
        vals = rndval.split(':')
        fname = vals[0]
        if fname == "":
            if index > 1:
                # Derive the file name from the first entry plus index.
                fname = rndvals_split[0].split(':')[0] + str(index)
            else:
                self.critical("Random generator file name is empty")
                sys.exit(errno.ENOENT)
        if fname == "-":
            # Reuse the last used seed for this generator.
            seeds.append(None)
            try:
                prng.get(index + 1).seed(None)
            # BUGFIX: was a bare "except:", which also swallowed
            # SystemExit and KeyboardInterrupt.
            except Exception:
                self.exception(
                    "Failed to seed the random generator %d "
                    "with the last used seed.", index + 1)
                sys.exit(Main.EXIT_FAILURE)
            continue
        if not os.path.isabs(fname):
            # Resolve relative names: cwd first, then the veles root.
            new_fname = os.path.abspath(fname)
            if os.path.exists(new_fname):
                fname = new_fname
            else:
                fname = os.path.join(root.common.dirs.veles, fname)
                if not os.path.exists(fname):
                    self.critical(
                        "Neither %s nor %s exist. Cannot seed "
                        "the random generator.", new_fname, fname)
                    sys.exit(errno.ENOENT)
        count = int(vals[1]) if len(vals) > 1 else 16
        dtype = numpy.dtype(vals[2]) if len(vals) > 2 else numpy.int32
        self.debug("Seeding with %d samples of type %s from %s to %d",
                   count, dtype, fname, index + 1)
        try:
            seed = numpy.fromfile(fname, dtype=dtype, count=count)
            prng.get(index + 1).seed(seed, dtype=dtype)
            seeds.append(seed)
        # BUGFIX: narrowed from a bare "except:" as above.
        except Exception:
            self.exception("Failed to seed the random generator with %s",
                           fname)
            sys.exit(Main.EXIT_FAILURE)
    self.seeds = seeds
def __init__(self, workflow, **kwargs):
    """Initializes minibatch bookkeeping, PRNG and normalization state."""
    kwargs["view_group"] = "LOADER"
    # Created before super() so base-class machinery can link against it.
    self.last_minibatch = Bool()
    super(Loader, self).__init__(workflow, **kwargs)
    self.verify_interface(ILoader)
    self.prng = kwargs.get("prng", random_generator.get())
    if not self.testing:
        self.shuffle_limit = kwargs.get(
            "shuffle_limit", numpy.iinfo(numpy.uint32).max)
    else:
        # Never shuffle in testing mode.
        self.shuffle_limit = 0
    self._max_minibatch_size = kwargs.get("minibatch_size", 100)
    if self._max_minibatch_size < 1:
        raise ValueError("minibatch_size must be greater than zero")
    # One length / end-offset slot per sample class
    # (presumably test/validation/train -- confirm CLASS_NAME order).
    self._class_lengths = [0] * len(CLASS_NAME)
    self._class_end_offsets = [0] * len(CLASS_NAME)
    self._has_labels = False
    self.epoch_ended = Bool()
    self.epoch_number = 0
    self.train_ended = Bool()
    self.test_ended = Bool()
    self.samples_served = 0
    self._global_offset = 0
    self.minibatch_class = 0
    # shallow_pickle: buffer contents are not serialized with the unit.
    self.minibatch_data = memory.Array(shallow_pickle=True)
    self.minibatch_indices = memory.Array(shallow_pickle=True)
    self.minibatch_labels = memory.Array(shallow_pickle=True)
    self._raw_minibatch_labels = []
    self._labels_mapping = {}
    self._reversed_labels_mapping = []
    self._samples_mapping = defaultdict(set)
    self.failed_minibatches = []
    self._total_failed = 0
    self._on_initialized = nothing
    self._unique_labels_count = 1  # "None" label
    self.shuffled_indices = memory.Array()
    self.normalization_type = kwargs.get("normalization_type", "none")
    self.normalization_parameters = kwargs.get(
        "normalization_parameters", {})
    self.train_ratio = kwargs.get("train_ratio", self.train_ratio)
def __init__(self, workflow, **kwargs):
    """Sets up all per-epoch and per-minibatch state for a Loader."""
    kwargs["view_group"] = "LOADER"
    # Must exist before super().__init__ runs.
    self.last_minibatch = Bool()
    super(Loader, self).__init__(workflow, **kwargs)
    self.verify_interface(ILoader)
    self.prng = kwargs.get("prng", random_generator.get())
    if not self.testing:
        self.shuffle_limit = kwargs.get(
            "shuffle_limit", numpy.iinfo(numpy.uint32).max)
    else:
        # Shuffling is disabled when only testing.
        self.shuffle_limit = 0
    self._max_minibatch_size = kwargs.get("minibatch_size", 100)
    if self._max_minibatch_size < 1:
        raise ValueError("minibatch_size must be greater than zero")
    # Per-class sample counts and cumulative end offsets.
    self._class_lengths = [0] * len(CLASS_NAME)
    self._class_end_offsets = [0] * len(CLASS_NAME)
    self._has_labels = False
    self.epoch_ended = Bool()
    self.epoch_number = 0
    self.train_ended = Bool()
    self.test_ended = Bool()
    self.samples_served = 0
    self._global_offset = 0
    self.minibatch_class = 0
    # Minibatch buffers; shallow_pickle skips their contents on pickling.
    self.minibatch_data = memory.Array(shallow_pickle=True)
    self.minibatch_indices = memory.Array(shallow_pickle=True)
    self.minibatch_labels = memory.Array(shallow_pickle=True)
    self._raw_minibatch_labels = []
    self._labels_mapping = {}
    self._reversed_labels_mapping = []
    self._samples_mapping = defaultdict(set)
    self.failed_minibatches = []
    self._total_failed = 0
    self._on_initialized = nothing
    self._unique_labels_count = 1  # "None" label
    self.shuffled_indices = memory.Array()
    self.normalization_type = kwargs.get("normalization_type", "none")
    self.normalization_parameters = kwargs.get(
        "normalization_parameters", {})
    self.train_ratio = kwargs.get("train_ratio", self.train_ratio)
def _seed_random(self, rndvals):
    """Seeds the numbered PRNGs from a comma-separated spec string.

    Each element is either a hex seed, ``-`` (reuse last stored seed),
    or ``fname[:count[:dtype]]``; element ``i`` seeds generator
    ``i + 1``.  Exits the process on unrecoverable errors.
    """
    self.debug("Seeding with %s", rndvals)
    rndvals_split = rndvals.split(',')
    seeds = []
    # enumerate() replaces the original zip(..., range(len(...))) idiom.
    for index, rndval in enumerate(rndvals_split):
        # Hex-encoded seed first.
        try:
            binvle = binascii.unhexlify(rndval)
            seed = numpy.frombuffer(binvle, dtype=numpy.uint8)
            prng.get(index + 1).seed(seed, dtype=numpy.uint8)
            seeds.append(seed)
            continue
        except (binascii.Error, TypeError):
            pass
        vals = rndval.split(':')
        fname = vals[0]
        if fname == "":
            if index > 1:
                fname = rndvals_split[0].split(':')[0] + str(index)
            else:
                self.critical("Random generator file name is empty")
                sys.exit(errno.ENOENT)
        if fname == "-":
            seeds.append(None)
            try:
                prng.get(index + 1).seed(None)
            # BUGFIX: was a bare "except:", which also swallowed
            # SystemExit and KeyboardInterrupt.
            except Exception:
                self.exception("Failed to seed the random generator %d "
                               "with the last used seed.", index + 1)
                sys.exit(Main.EXIT_FAILURE)
            continue
        if not os.path.isabs(fname):
            # Resolve relative names: cwd first, then the veles root.
            new_fname = os.path.abspath(fname)
            if os.path.exists(new_fname):
                fname = new_fname
            else:
                fname = os.path.join(root.common.dirs.veles, fname)
                if not os.path.exists(fname):
                    self.critical("Neither %s nor %s exist. Cannot seed "
                                  "the random generator.", new_fname, fname)
                    sys.exit(errno.ENOENT)
        count = int(vals[1]) if len(vals) > 1 else 16
        dtype = numpy.dtype(vals[2]) if len(vals) > 2 else numpy.int32
        self.debug("Seeding with %d samples of type %s from %s to %d",
                   count, dtype, fname, index + 1)
        try:
            seed = numpy.fromfile(fname, dtype=dtype, count=count)
            prng.get(index + 1).seed(seed, dtype=dtype)
            seeds.append(seed)
        # BUGFIX: narrowed from a bare "except:" as above.
        except Exception:
            self.exception("Failed to seed the random generator with %s",
                           fname)
            sys.exit(Main.EXIT_FAILURE)
    self.seeds = seeds
def __init__(self, workflow, **kwargs):
    """Creates the common state shared by forward-propagation units."""
    kwargs["view_group"] = kwargs.get("view_group", "WORKER")
    super(Forward, self).__init__(workflow, **kwargs)
    # Weight/bias initialization settings; bias stddev falls back to
    # the weights' stddev when not given explicitly.
    stddev = kwargs.get("weights_stddev")
    self.weights_stddev = stddev
    self.bias_stddev = kwargs.get("bias_stddev", stddev)
    self.weights_filling = kwargs.get("weights_filling", "uniform")
    self.bias_filling = kwargs.get("bias_filling", "uniform")
    self.rand = kwargs.get("rand", prng.get())
    self.weights_transposed = kwargs.get("weights_transposed", False)
    self.include_bias = kwargs.get("include_bias", True)
    self.demand("input")
    # Output buffer is shallow-pickled (contents not serialized).
    self.output = Array(shallow_pickle=True)
    self.weights = Array()
    self.bias = Array()
    self.forward_mode = False
    self.exports = [
        "weights", "bias", "include_bias", "weights_transposed"]
def __init__(self, workflow, **kwargs):
    """Initializes the shared base state of a forward unit."""
    kwargs["view_group"] = kwargs.get("view_group", "WORKER")
    super(Forward, self).__init__(workflow, **kwargs)
    self.weights_stddev = kwargs.get("weights_stddev")
    # Bias stddev defaults to the weights' stddev.
    self.bias_stddev = kwargs.get("bias_stddev", self.weights_stddev)
    self.weights_filling = kwargs.get("weights_filling", "uniform")
    self.bias_filling = kwargs.get("bias_filling", "uniform")
    self.rand = kwargs.get("rand", prng.get())
    self.weights_transposed = kwargs.get("weights_transposed", False)
    self.include_bias = kwargs.get("include_bias", True)
    self.demand("input")
    # Output is shallow-pickled; weights/bias are full Arrays.
    self.output = Array(shallow_pickle=True)
    self.weights = Array()
    self.bias = Array()
    self.forward_mode = False
    self.exports = ["weights", "bias", "include_bias",
                    "weights_transposed"]
def __init__(self, workflow, **kwargs):
    """Builds the RBM evaluation chain: binarize, reconstruct, MSE."""
    super(EvaluatorRBM, self).__init__(workflow, **kwargs)
    self.run_is_blocking = True
    self.binarization = BinarizationEval(
        self, rand=kwargs.get("rand", prng.get()))
    self.binarization.link_from(self.start_point)
    # Reconstruction uses weights owned elsewhere, transposed.
    self.rec = All2AllSigmoidWithForeignWeights(
        self, output_sample_shape=kwargs["bias_shape"],
        weights_transposed=True)
    self.rec.link_from(self.binarization)
    self.rec.link_attrs(self.binarization, ("input", "output"))
    self.mse = EvaluatorMSE(self, root=False, mean=False)
    self.mse.link_from(self.rec)
    self.mse.link_attrs(self.rec, "output")
    # NOTE(review): same link as the previous line, just spelled as a
    # (dst, src) pair -- appears redundant.
    self.mse.link_attrs(self.rec, ("output", "output"))
    self.mse.normalizer = NoneNormalizer()
    self.end_point.link_from(self.mse)
    self.binarization.link_attrs(self, "input", "batch_size")
    self.rec.link_attrs(self, "weights")
    self.mse.link_attrs(self, "target", "batch_size")
    self.link_attrs(self.rec, ("vbias", "bias"))
    self.demand("input", "weights", "target")
def initialize(self, device, **kwargs):
    """Allocates and fills weights, winners, distances and the 2-D
    hexagonal-like coordinate grid of the Kohonen map neurons."""
    super(KohonenTrainer, self).initialize(device=device, **kwargs)
    self._neurons_number = self.shape[0] * self.shape[1]
    # Flattened per-sample length of the input.
    self._sample_length = self.input.mem.size // self.input.mem.shape[0]
    # Initialize weights
    if self.weights_stddev is None:
        # Get weights magnitude and cap it to 0.05
        self.weights_stddev = min(self._get_weights_magnitude(), 0.05)
    weights_size = (self._sample_length * self._neurons_number)
    if not self.weights:
        self.weights.reset(numpy.zeros(weights_size,
                                       dtype=self.input.mem.dtype))
        # Dispatch table: fill strategy keyed by weights_filling.
        filling = {
            "uniform": lambda rand: rand.fill(
                self.weights.mem, -self.weights_stddev,
                self.weights_stddev),
            "gaussian": lambda rand: rand.fill_normal_real(
                self.weights.mem, 0, self.weights_stddev)
        }
        filling[self.weights_filling](prng.get())
        self.weights.mem = self.weights.mem.reshape((
            self._neurons_number, self._sample_length))
    else:
        assert self.weights.shape == (self._neurons_number,
                                      self._sample_length)
    if self.weights_transposed:
        # Reshape weights as a matrix:
        wtrncopy = self.weights.mem.transpose().copy()
        self.weights.mem.shape = wtrncopy.shape
        self.weights.mem[:] = wtrncopy[:]
    self._sample_length = \
        self.weights.mem.shape[0 if self.weights_transposed else 1]
    # Initialize winners
    self.winners.reset(numpy.zeros(self._neurons_number, numpy.int32))
    # Initialize distances
    batch_size = self.input.mem.shape[0]
    self._distances.reset(numpy.zeros(
        [batch_size, self._neurons_number],
        dtype=self.weights.mem.dtype))
    self.argmins.reset(numpy.zeros(batch_size, dtype=numpy.int32))
    self._coords.reset(numpy.zeros([self._neurons_number, 2],
                                   dtype=self.weights.mem.dtype))
    # Lay neurons out on a near-square grid spanning [-1, 1] x [-1, 1];
    # odd rows are shifted by half a step (staggered layout).
    sz = self._neurons_number
    rows = int(numpy.round(numpy.sqrt(sz)))
    cols = sz // rows
    if sz % rows != 0:
        cols += 1
    x_min = -1.0
    x_max = 1.0
    y_min = -1.0
    y_max = 1.0
    x_step = (x_max - x_min) / (cols - 1) if cols > 1 else 0
    y = y_min
    y_step = (y_max - y_min) / (rows - 1) if rows > 1 else 0
    offs = 0
    mem = self._coords.mem
    for _row in range(rows):
        x = x_min + (x_step * 0.5 if _row & 1 else 0)
        for _col in range(cols):
            mem[offs, 0] = x
            mem[offs, 1] = y
            offs += 1
            x += x_step
        y += y_step
    # Initial neighborhood radius derived from the coordinate span.
    self._sigma = (self._coords.mem.ravel().max() -
                   self._coords.mem.ravel().min()) * 1.42
def __init__(self, chromosome_factory, optimization_size, min_values,
             max_values, population_size, accuracy=0.00001, rand=None,
             max_generations=None, crossing_attempts=10):
    """Creates a population of random chromosomes.

    Parameters:
        chromosome_factory: callable producing chromosomes (used by new()).
        optimization_size: number of genes per chromosome.
        min_values / max_values: per-gene bounds (equal lengths).
        population_size: number of chromosomes to create.
        accuracy: floating point approximation accuracy.
        rand: random generator; defaults to prng.get() at call time.
        max_generations: generation cap; falls back to MAX_GENERATIONS.
        crossing_attempts: retries allowed per crossing operation.
    """
    super(Population, self).__init__()
    # BUGFIX: the default used to be "rand=prng.get()", evaluated once
    # at function-definition time, so every Population silently shared
    # the generator captured at import.  Resolve it per call instead;
    # passing rand explicitly behaves exactly as before.
    self.rand = rand if rand is not None else prng.get()
    self.size = population_size
    self.chromosome_factory = chromosome_factory
    self.chromosomes = []
    self.optimization = InlineObject()
    self.optimization.choice = "betw"
    self.optimization.code = "float"
    self.optimization.size = optimization_size
    self.optimization.min_values = min_values
    self.optimization.max_values = max_values
    assert len(min_values) == len(max_values)
    self.optimization.accuracy = accuracy
    # Fitness statistics, computed later per generation.
    self.fitness = None
    self.average_fit = None
    self.best_fit = None
    self.worst_fit = None
    self.median_fit = None
    # Previous-generation stats start at -inf-like sentinels.
    self.prev = InlineObject()
    self.prev.fitness = -1.0e30
    self.prev.average_fit = -1.0e30
    self.prev.best_fit = -1.0e30
    self.prev.worst_fit = -1.0e30
    self.prev.median_fit = -1.0e30
    # Selection fractions.
    self.roulette_select_size = 0.75
    self.random_select_size = 0.5
    self.tournament_size = 0.5
    self.tournament_select_size = 0.1
    # Crossing-operator configuration.
    self.crossing = InlineObject()
    self.crossing.pointed_crossings = 0.2
    self.crossing.pointed_points = 0.08
    self.crossing.pointed_probability = 1.0
    self.crossing.uniform_crossings = 0.15
    self.crossing.uniform_probability = 0.9
    self.crossing.arithmetic_crossings = 0.15
    self.crossing.arithmetic_probability = 0.9
    self.crossing.geometric_crossings = 0.2
    self.crossing.geometric_probability = 0.9
    # NOTE(review): the "pointed" crossing is configured above but not
    # included in the pipeline -- confirm whether that is intentional.
    self.crossing.pipeline = [
        self.cross_uniform, self.cross_arithmetic, self.cross_geometric]
    self.delimeter = None
    self.codes = None
    self.mutations = {
        "binary_point": {"use": False, "chromosomes": 0.2,
                         "points": 0.06, "probability": 0.35},
        "gaussian": {"use": True, "chromosomes": 0.35,
                     "points": 0.05, "probability": 0.7},
        "uniform": {"use": True, "chromosomes": 0.35,
                    "points": 0.05, "probability": 0.7},
        "altering": {"use": False, "chromosomes": 0.1,
                     "points": None, "probability": 0.35}
    }
    self.generation = 0
    self.max_generations = max_generations or self.MAX_GENERATIONS
    self.crossing_attempts = crossing_attempts
    self.improved = Bool(True)
    self.on_generation_changed_callback = lambda: None
    # Populate with random chromosomes.
    for _ in range(self.size):
        self.add(self.new(size=self.optimization.size))
    if self.optimization.code == "gray":
        self.compute_gray_codes()
def __init__(self, workflow, **kwargs):
    """Allocates the dropout mask/state buffers and grabs a PRNG."""
    super(DropoutForward, self).__init__(workflow, **kwargs)
    self.rand = random_generator.get()
    self.states = Array()
    self.mask = Array()  # dropout mask
def __init__(self, workflow, **kwargs):
    """Creates the output buffer; requires "input" and "batch_size"."""
    super(Binarization, self).__init__(workflow, **kwargs)
    self.rand = kwargs.get("rand", prng.get())
    self.output = Array()
    self.demand("input", "batch_size")
def __init__(self, workflow, **kwargs):
    """Sets up dropout forward state: mask, RNG states, generator."""
    super(DropoutForward, self).__init__(workflow, **kwargs)
    self.mask = Array()  # dropout mask
    self.states = Array()
    self.rand = random_generator.get()
def initialize(self, device, **kwargs):
    """Prepares the Kohonen trainer: weights, winners, distances and
    the staggered 2-D coordinate grid of map neurons."""
    super(KohonenTrainer, self).initialize(device=device, **kwargs)
    self._neurons_number = self.shape[0] * self.shape[1]
    # Flattened length of one input sample.
    self._sample_length = self.input.mem.size // self.input.mem.shape[0]
    # Initialize weights
    if self.weights_stddev is None:
        # Get weights magnitude and cap it to 0.05
        self.weights_stddev = min(self._get_weights_magnitude(), 0.05)
    weights_size = (self._sample_length * self._neurons_number)
    if not self.weights:
        self.weights.reset(
            numpy.zeros(weights_size, dtype=self.input.mem.dtype))
        # Fill strategy selected by the weights_filling key.
        filling = {
            "uniform": lambda rand: rand.fill(self.weights.mem,
                                              -self.weights_stddev,
                                              self.weights_stddev),
            "gaussian": lambda rand: rand.fill_normal_real(
                self.weights.mem, 0, self.weights_stddev)
        }
        filling[self.weights_filling](prng.get())
        self.weights.mem = self.weights.mem.reshape(
            (self._neurons_number, self._sample_length))
    else:
        assert self.weights.shape == (self._neurons_number,
                                      self._sample_length)
    if self.weights_transposed:
        # Reshape weights as a matrix:
        wtrncopy = self.weights.mem.transpose().copy()
        self.weights.mem.shape = wtrncopy.shape
        self.weights.mem[:] = wtrncopy[:]
    self._sample_length = \
        self.weights.mem.shape[0 if self.weights_transposed else 1]
    # Initialize winners
    self.winners.reset(numpy.zeros(self._neurons_number, numpy.int32))
    # Initialize distances
    batch_size = self.input.mem.shape[0]
    self._distances.reset(
        numpy.zeros([batch_size, self._neurons_number],
                    dtype=self.weights.mem.dtype))
    self.argmins.reset(numpy.zeros(batch_size, dtype=numpy.int32))
    self._coords.reset(
        numpy.zeros([self._neurons_number, 2],
                    dtype=self.weights.mem.dtype))
    # Near-square grid over [-1, 1] x [-1, 1]; odd rows shifted by half
    # a step (staggered layout).
    sz = self._neurons_number
    rows = int(numpy.round(numpy.sqrt(sz)))
    cols = sz // rows
    if sz % rows != 0:
        cols += 1
    x_min = -1.0
    x_max = 1.0
    y_min = -1.0
    y_max = 1.0
    x_step = (x_max - x_min) / (cols - 1) if cols > 1 else 0
    y = y_min
    y_step = (y_max - y_min) / (rows - 1) if rows > 1 else 0
    offs = 0
    mem = self._coords.mem
    for _row in range(rows):
        x = x_min + (x_step * 0.5 if _row & 1 else 0)
        for _col in range(cols):
            mem[offs, 0] = x
            mem[offs, 1] = y
            offs += 1
            x += x_step
        y += y_step
    # Initial neighborhood radius from the coordinate span.
    self._sigma = (self._coords.mem.ravel().max() -
                   self._coords.mem.ravel().min()) * 1.42
def __init__(self, chromosome_factory, optimization_size, min_values,
             max_values, population_size, accuracy=0.00001, rand=None,
             max_generations=None, crossing_attempts=10):
    """Creates and randomly fills a genetic population.

    Parameters:
        chromosome_factory: callable producing chromosomes (via new()).
        optimization_size: number of genes per chromosome.
        min_values / max_values: per-gene bounds (equal lengths).
        population_size: number of chromosomes to create.
        accuracy: floating point approximation accuracy.
        rand: random generator; defaults to prng.get() at call time.
        max_generations: generation cap; falls back to MAX_GENERATIONS.
        crossing_attempts: retries allowed per crossing operation.
    """
    super(Population, self).__init__()
    # BUGFIX: "rand=prng.get()" was evaluated once at definition time,
    # sharing the import-time generator across all instances.  Use a
    # None sentinel and resolve per call; explicit callers see no change.
    self.rand = rand if rand is not None else prng.get()
    self.size = population_size
    self.chromosome_factory = chromosome_factory
    self.chromosomes = []
    self.optimization = InlineObject()
    self.optimization.choice = "betw"
    self.optimization.code = "float"
    self.optimization.size = optimization_size
    self.optimization.min_values = min_values
    self.optimization.max_values = max_values
    assert len(min_values) == len(max_values)
    self.optimization.accuracy = accuracy
    # Per-generation fitness statistics, computed elsewhere.
    self.fitness = None
    self.average_fit = None
    self.best_fit = None
    self.worst_fit = None
    self.median_fit = None
    self.prev = InlineObject()
    self.prev.fitness = -1.0e30
    self.prev.average_fit = -1.0e30
    self.prev.best_fit = -1.0e30
    self.prev.worst_fit = -1.0e30
    self.prev.median_fit = -1.0e30
    # Selection fractions.
    self.roulette_select_size = 0.75
    self.random_select_size = 0.5
    self.tournament_size = 0.5
    self.tournament_select_size = 0.1
    # Crossing-operator configuration.
    self.crossing = InlineObject()
    self.crossing.pointed_crossings = 0.2
    self.crossing.pointed_points = 0.08
    self.crossing.pointed_probability = 1.0
    self.crossing.uniform_crossings = 0.15
    self.crossing.uniform_probability = 0.9
    self.crossing.arithmetic_crossings = 0.15
    self.crossing.arithmetic_probability = 0.9
    self.crossing.geometric_crossings = 0.2
    self.crossing.geometric_probability = 0.9
    # NOTE(review): "pointed" crossing is configured but absent from the
    # pipeline -- confirm whether that is intentional.
    self.crossing.pipeline = [self.cross_uniform, self.cross_arithmetic,
                              self.cross_geometric]
    self.delimeter = None
    self.codes = None
    self.mutations = {
        "binary_point": {"use": False, "chromosomes": 0.2,
                         "points": 0.06, "probability": 0.35},
        "gaussian": {"use": True, "chromosomes": 0.35,
                     "points": 0.05, "probability": 0.7},
        "uniform": {"use": True, "chromosomes": 0.35,
                    "points": 0.05, "probability": 0.7},
        "altering": {"use": False, "chromosomes": 0.1,
                     "points": None, "probability": 0.35},
    }
    self.generation = 0
    self.max_generations = max_generations or self.MAX_GENERATIONS
    self.crossing_attempts = crossing_attempts
    self.improved = Bool(True)
    self.on_generation_changed_callback = lambda: None
    # Fill the population with random chromosomes.
    for _ in range(self.size):
        self.add(self.new(size=self.optimization.size))
    if self.optimization.code == "gray":
        self.compute_gray_codes()
def __init__(self, workflow, **kwargs):
    """Allocates the output buffer; demands "input" and "batch_size"."""
    super(Binarization, self).__init__(workflow, **kwargs)
    self.output = Array()
    # Caller may supply a dedicated generator; defaults to the global one.
    self.rand = kwargs.get("rand", prng.get())
    self.demand("input", "batch_size")