Example #1
 def redo(self):
     # Pick a source
     if self.sample_merged:
         src_layer = layer.Layer()
         for l in self.doc.layers:
             l.merge_into(src_layer, strokemap=False)
     else:
         src_layer = self.doc.layer
     # Choose a target
     if self.make_new_layer:
         # Write to a new layer
         assert self.new_layer is None
         nl = layer.Layer()
         nl.content_observers.append(self.doc.layer_modified_cb)
         nl.set_symmetry_axis(self.doc.get_symmetry_axis())
         self.new_layer = nl
         self.new_layer_idx = self.doc.layer_idx + 1
         self.doc.layers.insert(self.new_layer_idx, nl)
         self.doc.layer_idx = self.new_layer_idx
         self._notify_document_observers()
         dst_layer = nl
     else:
         # Overwrite current, but snapshot 1st
         assert self.snapshot is None
         self.snapshot = self.doc.layer.save_snapshot()
         dst_layer = self.doc.layer
     # Fill connected areas of the source into the destination
     src_layer.flood_fill(self.x,
                          self.y,
                          self.color,
                          self.bbox,
                          self.tolerance,
                          dst_layer=dst_layer)
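For context, a matching undo() would have to reverse everything redo() sets up and clear new_layer/snapshot so the asserts hold on the next redo. A minimal sketch under that assumption (attribute names follow the redo() shown; this is not the project's actual undo):

 def undo(self):
     # Hedged sketch: reverse the redo() above and reset its bookkeeping.
     if self.make_new_layer:
         self.doc.layers.remove(self.new_layer)
         self.doc.layer_idx = self.new_layer_idx - 1
         self.new_layer = None
         self._notify_document_observers()
     else:
         self.doc.layer.load_snapshot(self.snapshot)
         self.snapshot = None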
Example #2
def createNetwork(learnConst, momentum):
    ne.Neuron.talk = networkTalk
    la.Layer.talk = networkTalk
    say("Creating Layers")
    say("----------------------------------------------------------")
    layers.append(la.Layer("Int Layer", learnConst, momentum))
    layers.append(la.Layer("Hid Layer1", learnConst, momentum))
    # layers.append(la.Layer("Hid Layer2", learnConst, momentum))
    layers.append(la.Layer("Out Layer", learnConst, momentum))
    layers[2].isOutput()
    say("----------------------------------------------------------")
    say("Done creating layers! \n")

    say("Adding nodes to layers")
    say("----------------------------------------------------------")
    layers[0].addNeuron(ne.Neuron(linear, "Inp neuron", False))
    for i in range(20):
        layers[1].addNeuron(ne.Neuron(nonLinear, "Hid neuron" + str(i), False))
    layers[2].addNeuron(ne.Neuron(linear, "Out neuron", False))
    say("----------------------------------------------------------")
    say("Done adding! \n")

    say("Adding connections between layers")
    say("----------------------------------------------------------")
    addConnection(layers[0], layers[1])
    addConnection(layers[1], layers[2])
    # addConnection(layers[2], layers[3])
    say("----------------------------------------------------------")
    say("Done building! \n")
    return (layers[0], layers[2])
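A hypothetical call, assuming the module-level layers list and the say/networkTalk/addConnection helpers used above are already defined:

# Hypothetical usage; the learning-constant and momentum values are arbitrary.
inputLayer, outputLayer = createNetwork(learnConst=0.05, momentum=0.9)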
Example #3
 def save_png(self,
              filename,
              alpha=False,
              multifile=False,
              animation=False,
              **kwargs):
     doc_bbox = self.get_effective_bbox()
     if multifile:
         self.save_multifile_png(filename, **kwargs)
     elif animation:
         self.ani.save_png(filename, **kwargs)
     elif alpha:
         tmp_layer = layer.Layer()
         for l in self.layers:
             l.merge_into(tmp_layer)
         tmp_layer.save_as_png(filename, *doc_bbox)
     else:
         pixbufsurface.save_as_png(self, filename, *doc_bbox,
                                   alpha=False, **kwargs)
Example #4
	def __init__(self, title="", width=None, height=None, main_window=True):
		#set share
		if share.USE_FULLSCREEN:
			share.REAL_WIDTH = share.SCREEN_WIDTH
			share.REAL_HEIGHT = int(
				share.SCREEN_WIDTH*(
				share.STD_HEIGHT*1.0/share.STD_WIDTH))
		else:
			if width: share.REAL_WIDTH = width
			if height: share.REAL_HEIGHT = height
		share.WIDTH_ZOOM_SCALE = share.REAL_WIDTH*1.0/share.STD_WIDTH
		share.HEIGHT_ZOOM_SCALE = share.REAL_HEIGHT*1.0/share.STD_HEIGHT
		share.WINDOW_TITLE = title
		#set window
		gtk.Window.__init__(self, gtk.WINDOW_TOPLEVEL)
		widget.Widget.__init__(self)
		self.set_resizable(False)
		self.set_app_paintable(True)
		if share.USE_FULLSCREEN:
			self.modify_bg(0, gtk.gdk.color_parse("#333"))
			self.set_decorated(False)
			self.set_size_request(share.SCREEN_WIDTH, share.SCREEN_HEIGHT)
		else:
			self.set_size_request(share.REAL_WIDTH, share.REAL_HEIGHT)
		self.set_position(gtk.WIN_POS_CENTER)
		self.set_title(title)
		#self.set_events(share.EVENT_MASK)
		#enable rgba support
		#gtk.gdk.Screen().get_rgb_colormap()
		#gtk.gdk.Screen().get_rgba_colormap()
		self.color_map = self.get_screen().get_rgba_colormap() #rgba support
		if self.color_map:
			gtk.widget_set_default_colormap(self.color_map)
			self.set_colormap(self.color_map)
		#set layer
		self.layer_list = {}
		if share.USE_FULLSCREEN:
			self.layer_list["__BASE__"] = layer.Layer("__BASE__")
			self.layer_list["__SCREEN__"] = layer.Layer("__SCREEN__")
			self.layer_list["__SCREEN__"].put(
				self.layer_list["__BASE__"],
				0,
				(share.SCREEN_HEIGHT-share.REAL_HEIGHT)/2,
			)
			self.add(self.layer_list["__SCREEN__"])
		else:
			self.layer_list["__BASE__"] = layer.Layer("__BASE__")
			self.add(self.layer_list["__BASE__"])
		#set event
		self.bind_event("exit", event.exit)
		self.bind_event("destroy", lambda *args: True)
		self.show_all()
		#set main window
		if main_window:
			share.MAIN_WINDOW = self
			share.set_gtk_font(share.GLOBAL_GTK_FONT_NAME, share.GLOBAL_GTK_FONT_SIZE)
			self.hide()
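A hypothetical instantiation, assuming the enclosing class is named Window and the share constants are already configured:

# Hypothetical usage; Window is the assumed name of the class above.
win = Window(title="Demo", width=800, height=600, main_window=False)
gtk.main()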
Example #5
    def __init__(self, W_init, b_init, activations):
        '''
        Multi-layer perceptron class, computes the composition of a sequence of Layers

        :parameters:
            - W_init : list of np.ndarray, len=N
                Values to initialize the weight matrix in each layer to.
                The layer sizes will be inferred from the shape of each matrix in W_init
            - b_init : list of np.ndarray, len=N
                Values to initialize the bias vector in each layer to
            - activations : list of theano.tensor.elemwise.Elemwise, len=N
                Activation function for layer output for each layer
        '''
        # Make sure the input lists are all of the same length
        assert len(W_init) == len(b_init) == len(activations)

        # Initialize lists of layers
        self.layers = []
        # Construct the layers
        for W, b, activation in zip(W_init, b_init, activations):
            self.layers.append(layer.Layer(W, b, activation))

        # Combine parameters from all layers
        self.params = []
        for l in self.layers:
            self.params += l.params
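A hypothetical way to build the constructor arguments for a 2-4-1 network; the enclosing class name MLP, the (n_out, n_in) weight orientation, and the use of Theano's sigmoid are all assumptions:

import numpy as np
import theano.tensor as T

layer_sizes = [2, 4, 1]                      # hypothetical architecture
W_init = [np.random.randn(n_out, n_in)       # one weight matrix per layer
          for n_in, n_out in zip(layer_sizes[:-1], layer_sizes[1:])]
b_init = [np.zeros(n_out) for n_out in layer_sizes[1:]]
activations = [T.nnet.sigmoid] * (len(layer_sizes) - 1)
mlp = MLP(W_init, b_init, activations)       # MLP is the assumed class name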
Example #6
    def load_model_from_stream(self, f):
        self.layers = []
        self.layer_params = []

        type_code = struct.unpack('i', f.read(4))[0]
        self.check_type_code(type_code)

        n_layers = struct.unpack('i', f.read(4))[0]
        for i in range(n_layers):
            layer = ly.Layer()
            layer.load_from_stream(f)
            self.layers.append(layer)

        n_params = struct.unpack('i', f.read(4))[0]
        for i in range(n_params):
            # p = ly.LayerParams(in_stream=f)
            p = ly.LayerParams.load_from_stream(f)
            self.layer_params.append(p)

            for layer in self.layers:
                if layer._param_id == p._param_id:
                    layer.set_params(p)
                elif layer.use_batch_normalization and layer.bn_layer._param_id == p._param_id:
                    layer.bn_layer.set_params(p)

        self.in_dim = self.layers[0].in_dim
        self.out_dim = self.layers[-1].out_dim
        self.loss = self.layers[-1].loss
        
        self.output_layer_added = False
        self._update_param_size()
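A hedged sketch of the matching writer for the stream layout read above; the method and attribute names (save_model_to_stream, type_code, save_to_stream) mirror the loader but are assumptions, not the project's actual API:

    def save_model_to_stream(self, f):
        # Write the same layout the loader expects: type code, layer count,
        # layers, param count, params.
        f.write(struct.pack('i', self.type_code))
        f.write(struct.pack('i', len(self.layers)))
        for lyr in self.layers:
            lyr.save_to_stream(f)
        f.write(struct.pack('i', len(self.layer_params)))
        for p in self.layer_params:
            p.save_to_stream(f)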
Example #7
    def __init__(self, layers):
        self.layers = []

        for lay in layers:
            numIn = lay[0]
            numN = lay[1]
            self.layers.append(layer.Layer(numInputs=numIn, numNodes=numN))
Example #8
    def __init__(self, sizes, cost=CrossEntropyCost, act=SigmoidActivation):
        """The list ``sizes`` contains the number of neurons in the
        respective layers of the network.  For example, if the list
        was [2, 3, 1] then it would be a three-layer network, with the
        first layer containing 2 neurons, the second layer 3 neurons,
        and the third layer 1 neuron.  The biases and weights for the
        network are initialized randomly, using a Gaussian
        distribution with mean 0, and variance 1.  Note that the first
        layer is assumed to be an input layer, and by convention we
        won't set any biases for those neurons, since biases are only
        ever used in computing the outputs from later layers."""
        self.num_layers = len(sizes)
        self.sizes = sizes
        self.default_weight_initializer()
        self.cost = cost
        self.activ_fn = act
        # zip pairs each weight matrix with its bias vector to build the fully connected hidden layers

        self.layers = [
            layer.Layer(w, b, act)
            for w, b in zip(self.weights[:-1], self.biases[:-1])
        ]

        # the output layer can be configured via an output config string
        cost_act = None if cost.combined() else act
        self.output_layer = layer.OutputLayer(self.weights[-1],
                                              self.biases[-1], cost_act, cost)
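A hypothetical instantiation matching the docstring's [2, 3, 1] example; the enclosing class name Network is an assumption:

# Hypothetical usage: 2 inputs, one hidden layer of 3 neurons, 1 output.
net = Network([2, 3, 1], cost=CrossEntropyCost, act=SigmoidActivation)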
Example #9
 def __init__(self, doc, insert_idx=None, name=''):
     self.doc = doc
     self.insert_idx = insert_idx
     snapshot = self.doc.layers[self.insert_idx].save_snapshot()
     self.new_layer = layer.Layer(name)
     self.new_layer.load_snapshot(snapshot)
     self.new_layer.content_observers.append(self.doc.layer_modified_cb)
Example #10
    def JoinSpatialTableToLayer(self, definition):
        """ Uses Spatial join to find points within polygons.

        This module is likely more flexible than that, but that's what I
        have currently tested.
        """
        #print('In JoinSpatialTableToLayer.  definition: {}'.format(definition))
        layer_name = definition['layer_name']
        layer_path = definition['layer_path']
        table_name = definition['table_name']

        try:
            layer_style = definition['layer_style']
        except KeyError:
            layer_style = False

        out_name = layer_name + '__' + table_name

        out_feature_class = self.gdb_path + '\\' + out_name
        join_features = self.gdb_path + '\\' + table_name

        arcpy.SpatialJoin_analysis(target_features=layer_path,
                                   join_features=join_features,
                                   out_feature_class=out_feature_class)

        new_layer = layer.Layer({
            'path': out_feature_class,
            'name': out_name,
            'style': layer_style,
            'definition_query': 'Join_Count > 0'
        })

        self.new_layers.append(new_layer)
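A hypothetical definition dict for this method, showing the keys it reads (layer_style is optional and defaults to False); all paths and names below are placeholders:

definition = {
    'layer_name': 'parcels',
    'layer_path': r'C:\data\project.gdb\parcels',
    'table_name': 'permits',
    'layer_style': 'parcels.lyr',  # optional
}
mapper.JoinSpatialTableToLayer(definition)  # mapper: assumed instance of the class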
Example #11
    def __init__(self, layup, materialID, core_thick=0.0, compute=None):

        self.layup, self.symmetric = parse_layup(layup)
        self.layers = []

        counter = 0
        for orientation in self.layup:
            new_layer = layer.Layer(materialID, orientation)
            new_layer.set_index(counter)
            counter += 1
            self.layers.append(new_layer)

        self.total_ply_thickness = len(
            self.layup) * self.layers[0].PROPS['h0'] / 1000
        if core_thick != 0.0:
            assert (core_thick > 0.0)
            self.has_core = True
        else:
            self.has_core = False

        self.zc = float(core_thick)
        self.total_thickness = self.total_ply_thickness + self.zc
        self._assign_h()
        self.computed = False
        if compute is not None:
            self.compute_all()
Example #12
def calculate_metrics(data,
                      target,
                      receptive_field_length,
                      threshold,
                      parameters=None,
                      num_data=2000,
                      isForced=False,
                      isSorted=False,
                      isRSTDP=False):

    # Structure of the TNN

    num_outputs = 10

    # threshold indicates the highest filter spike time that can be considered
    layer1 = firstlayer.FirstLayer(
        layer_id=1,
        training_raw_data=data[0],
        threshold=8,
        receptive_field_length=receptive_field_length)
    receptive_field = (int(14 - receptive_field_length / 2),
                       int(14 - receptive_field_length / 2))

    # threshold indicates the max neuron sum before firing
    layer2 = layer.Layer(layer_id=2,
                         num_neurons=num_outputs,
                         prev_layer=layer1,
                         threshold=threshold)
    #layer3 = layer.Layer(layer_id=3, num_neurons=num_outputs, prev_layer=layer2, threshold=threshold)
    #layer4 = layer.Layer(layer_id=4, num_neurons=num_outputs, prev_layer=layer3, threshold=threshold)
    #layer5 = layer.Layer(layer_id=5, num_neurons=num_outputs, prev_layer=layer4, threshold=threshold)

    hidden_layers = []
    hidden_layers.append(layer2)
    #hidden_layers.append(layer3)
    #hidden_layers.append(layer4)
    #hidden_layers.append(layer5)

    # randomly select num_data images and split them evenly into training and test sets
    permutation = np.random.permutation(len(data))
    training = permutation[int(num_data / 2):num_data]
    test = permutation[:int(num_data / 2)]

    if isSorted:
        training = np.sort(training)

    # Generates spikes for layer 1 using 2 different filters
    # this is the testing phase
    #pdb.set_trace()

    training_results, assignments = evaluate(layer1, hidden_layers,
                                             data[training], target[training],
                                             receptive_field, parameters, True,
                                             None, isForced, isRSTDP)
    print(assignments)

    test_results = evaluate(layer1, hidden_layers, data[test], target[test],
                            receptive_field, parameters, False, assignments)
    return [training_results, test_results]
Example #13
 def getLayer(self):
     """Return the layer this proc is running.
     @rtype:  Layer
     @return: The layer this proc is running."""
     response = self.stub.GetLayer(
         host_pb2.ProcGetLayerRequest(proc=self.data),
         timeout=Cuebot.Timeout)
     return layer.Layer(response.layer)
Example #14
 def __init__(self, doc, insert_idx=None, after=None, name=''):
     self.doc = doc
     self.insert_idx = insert_idx
     if after:
         l_idx = self.doc.layers.index(after)
         self.insert_idx = l_idx + 1
     self.layer = layer.Layer(name)
     self.layer.content_observers.append(self.doc.layer_modified_cb)
Example #15
    def __init__(self, doc, frame):
        self.doc = doc
        self.frame = frame

        # Create new layer:
        layername = layername_from_description(self.frame.description)
        self.layer = layer.Layer(name=layername)
        self.layer._surface.observers.append(self.doc.layer_modified_cb)
Example #16
    def __init__(self, num_input_nodes, num_hidden_layers, num_nodes_per_hidden_layer, num_output_nodes):
        self.num_input_nodes = num_input_nodes
        self.num_hidden_layers = num_hidden_layers
        self.num_nodes_per_hidden_layer = num_nodes_per_hidden_layer
        self.num_output_nodes = num_output_nodes

        # create input layer
        self.input_layer = layer.Layer(num_input_nodes, 0)

        # create hidden layers
        self.hidden_layers = []
        self.hidden_layers.append(layer.Layer(num_nodes_per_hidden_layer, num_input_nodes))
        for i in range(num_hidden_layers - 1):
            self.hidden_layers.append(layer.Layer(num_nodes_per_hidden_layer, num_nodes_per_hidden_layer))

        # create output layer
        self.output_layer = layer.Layer(num_output_nodes, num_nodes_per_hidden_layer)
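A hypothetical instantiation; the enclosing class name NeuralNetwork is an assumption:

# Hypothetical usage: 3 inputs, 2 hidden layers of 5 nodes each, 1 output.
nn = NeuralNetwork(num_input_nodes=3, num_hidden_layers=2,
                   num_nodes_per_hidden_layer=5, num_output_nodes=1)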
Example #17
 def getLayers(self):
     """Returns the list of layers
     @rtype:  list<Layer>
     @return: List of layers"""
     response = self.stub.GetLayers(
         job_pb2.JobGetLayersRequest(job=self.data), timeout=Cuebot.Timeout)
     layerSeq = response.layers
     return [layer.Layer(lyr) for lyr in layerSeq.layers]
Example #18
 def __init__(self, nlayers, ninputs, nneuronslayer):
     self.mylayers = []
     self.nlastlayer = nlayers-1
     self.ninputs = ninputs
     previousneurons = ninputs
     for i in range(nlayers):
         self.mylayers.append(ly.Layer(nneuronslayer[i], previousneurons))
         previousneurons = nneuronslayer[i]
Example #19
    def __init__(self,
                 n_inputs=4,
                 n_hiddenlayers=2,
                 hiddenlayersnodes=[5, 5],
                 n_outputs=3,
                 learning_rate=0.1,
                 n_iters=1000,
                 activation='sigmoid',
                 bias=False):
        self.activation_function = {
            'sigmoid': (lambda z: 1 / (1 + np.exp(-z))),
            'hyperbolicTangent': (lambda z: (np.exp(z) - np.exp(-z)) /
                                  (np.exp(z) + np.exp(-z)))
            #'hyperbolicTangent': ( lambda z: np.tanh( z ) )
        }
        self.deriv_activation_function = {
            'sigmoid': (lambda z: z * (1 - z)),
            'hyperbolicTangent': (lambda z: 1.0 - z**2),
        }

        self.RunActivation = self.activation_function[activation]
        self.RunDerivation = self.deriv_activation_function[activation]

        self.bias = bias
        self.learning_rate = learning_rate
        self.activation = activation
        self.n_iters = n_iters
        self.layers = []

        # Input Layer
        self.layers.append(
            layer.Layer(n_inputs, hiddenlayersnodes[0], self.learning_rate,
                        self.RunActivation, self.bias))
        # Hidden Layers
        for i in range(len(hiddenlayersnodes) - 1):
            self.layers.append(
                layer.Layer(hiddenlayersnodes[i], hiddenlayersnodes[i + 1],
                            self.learning_rate, self.RunActivation, self.bias))
        # Output Layer
        self.layers.append(
            layer.Layer(hiddenlayersnodes[-1], n_outputs, self.learning_rate,
                        self.RunActivation, self.bias))
Example #20
    def __init__(self, function, input_count, shape):
        self.function = function
        self.input_count = input_count
        # materialize the zip so it supports len() and indexing (needed on Python 3)
        self.layer_struct = list(zip(shape[:-1], shape[1:]))
        self.layer_count = len(self.layer_struct)
        self.layers = []

        # Initialize each of its layers
        for i in range(len(self.layer_struct)):
            self.layers.append(
                layer.Layer(function, input_count, self.layer_struct[i]))
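A hypothetical instantiation; the class name Network and the activation function are placeholders:

# Hypothetical usage: the pairs (2, 4) and (4, 1) become the two layers.
net = Network(function=some_activation, input_count=2, shape=[2, 4, 1])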
Example #21
 def __init__(self, input_z_size, emb_size, class_num, code_dim, img_size):
     lay = layer.Layer()
     self.linear = lay.linear
     self.up_conv = lay.up_conv
     self.conv = lay.conv
     self.lrelu = lay.lrelu
     self.input_z_size = input_z_size
     self.emb_size = emb_size
     self.class_num = class_num
     self.code_dim = code_dim
     self.img_size = img_size
Example #22
    def layerInitializer(self):

        L = []
        inputDim = 2
        for Size in self.unitSize:
            Lt = layer.Layer(units=Size,
                             inputDim=inputDim,
                             alpha=self.myAlpha,
                             M=self.M)
            L.append(Lt)
            inputDim = Size
        return L
Example #23
def main():
    np.random.seed(1)
    a0, y = loadData()
    
    w0, b0 = layer.Layer.randomW_B(3, 4)
    w1, b1 = layer.Layer.randomW_B(4, 1)
    l0 = layer.Layer(
        w0,
        b0,
        learning_rate=0.3,
        act=activation.LeakyReLU()
    )
    l1 = layer.Layer(
        w1,
        b1,
        learning_rate=0.3,
        act=activation.Sigmoid()
    )

    for i in range(10000):
        a1 = l0.fp(a0)
        a2 = l1.fp(a1)
        loss = Loss(a2, y)  # backward pass: propagate gradients from the last layer back to the first
        g_a2 = loss.gradient()
        g_a1 = l1.bp(g_a2)
        l0.bp(g_a1)

    # validation: run the trained network on every 3-bit input
    s0 = [
        [0, 0, 0],
        [0, 0, 1],
        [0, 1, 0],
        [0, 1, 1],
        [1, 0, 0],
        [1, 0, 1],
        [1, 1, 0],
        [1, 1, 1]]
    s1 = l0.fp(s0)
    s2 = l1.fp(s1)
    print(s2)
Example #24
	def layer_add(self, layer_name, x=0, y=0, show=True, master_name="__BASE__"):
		try:
			if layer_name in self.layer_list:
				raise Exception("layer_name exist")
			l = layer.Layer(layer_name)
			l.set_coord(x, y)
			l.master = self.layer_list[master_name]
			l.master_name = master_name
			show and l.show()
			l.master.put(l, l.real_x, l.real_y)
			self.layer_list[layer_name] = l
		except:
			log.err(repr(self), layer_name, traceback.format_exc())
Example #25
    def __init__(self, quantity_neurons_layer):
        self.layers = []

        q_layers = len(quantity_neurons_layer)
        for i in range(q_layers):
            q_neurons = quantity_neurons_layer[i]
            # create the layers with their appropriate sizes
            self.layers.append(layer.Layer(q_neurons))

        for j in range(q_layers - 1):
            cur_layer = self.layers[j]
            next_layer = self.layers[j + 1]
            cur_layer.connect_layer(next_layer)
Example #26
def intersect_gadm(source_layer, gadm_layer):
    input_list = []

    output_layer = layer.Layer(None, [])

    for t in source_layer.tile_list:
        input_list.append((output_layer, t, gadm_layer.tile_list[0]))

    if os.path.splitext(source_layer.input_dataset)[1] in ['.rvrt', '.tif']:
        util.exec_multiprocess(raster_intersect, input_list)
    else:
        util.exec_multiprocess(intersect, input_list)

    return output_layer
Example #27
    def layer_setup(self):
        self.layer_height_percent = 0.9  #used for ratio in layer-gui making
        self.layer_width = self.width
        self.layer_height = self.height * self.layer_height_percent

        #Layer is split into a grid of "regions" that contain the map objects.
        self.layer_region_width = 200
        self.layer_region_height = 200
        self.layer_grid_visibility = False  #toggles with toggle_grid
        self.layer_color = [112, 200, 20, 255]

        self.layer = layer.Layer(self.layer_width, self.layer_height,
                                 self.layer_color, self.layer_region_width,
                                 self.layer_region_height)
Example #28
    def calculate_cost(self, l, i, j, epsilon, training_data):
        layers = []
        weight = copy.deepcopy(self.weight)
        hidden_num = len(weight) - 1
        weight[l][i][j] = weight[l][i][j] + epsilon
        cost = 0
        regularization = 0
        for _ in range(hidden_num + 2):
            layers.append([])
        for y, x in training_data:
            # forward propagation
            input_layer = layer.Layer(x)
            input_layer.set_output([1] + x)  # prepend a bias unit to the sample
            layers[0] = input_layer

            for k in range(1, hidden_num + 1):
                hidden_input = np.dot(layers[k - 1].get_output(), weight[k - 1])
                hidden_layer = layer.Layer(hidden_input)
                layers[k] = hidden_layer

            output_input = np.dot(layers[hidden_num].get_output(),
                                  weight[hidden_num])
            output_layer = layer.Layer(output_input)
            layers[hidden_num + 1] = output_layer
            # the output still carries the bias unit, so res[1] is the prediction
            res = layers[hidden_num + 1].get_output()
            # assumes a single output unit
            cost += y * math.log(res[1]) + (1 - y) * math.log(1 - res[1])

        # use distinct loop names so the l/i/j parameters are not clobbered
        for lw in range(hidden_num + 1):
            for iw in range(1, len(weight[lw])):
                for jw in range(len(weight[lw][iw])):
                    regularization += weight[lw][iw][jw] * weight[lw][iw][jw]
        # regularized cost: -(1/m) * sum + lambda / (2m) * sum of squared weights
        cost = (-1.0 / len(training_data)) * cost + (
            LAMBDA / (2.0 * len(training_data))) * regularization
        return cost
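The perturbed cost above is the building block of numerical gradient checking: evaluating it at +epsilon and -epsilon approximates one partial derivative. A minimal sketch under that assumption (the method name numerical_gradient is hypothetical; only calculate_cost comes from the code above):

    def numerical_gradient(self, l, i, j, training_data, epsilon=1e-4):
        # central difference: (J(w + eps) - J(w - eps)) / (2 * eps)
        cost_plus = self.calculate_cost(l, i, j, epsilon, training_data)
        cost_minus = self.calculate_cost(l, i, j, -epsilon, training_data)
        return (cost_plus - cost_minus) / (2 * epsilon)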
Example #29
def intersect_layers(layer_a, layer_b):
    input_list = []

    output_layer = layer.Layer(None, [])

    # need to sort tiles to make sure both lists line up
    layer_a.tile_list.sort(key=lambda x: x.tile_id)
    layer_b.tile_list.sort(key=lambda x: x.tile_id)

    for a, b in zip(layer_a.tile_list, layer_b.tile_list):
        input_list.append((output_layer, a, b))

    util.exec_multiprocess(intersect, input_list)

    return output_layer
Example #30
    def _GeocodeTables(self):
        i = 0
        for table in self._tables:
            print(table)
            if table.geocode:
                table_path = os.path.join(self.gdb_path, table.name)
                geocoded_name = table.geocoded_layer_name
                self._geocode(table_path, geocoded_name)
                geocoded_layer_path = os.path.join(self.gdb_path,
                                                   geocoded_name)
                geocoded_layer = layer.Layer({
                    'path': geocoded_layer_path,
                    'name': geocoded_name,
                    'style': table.geocode_layer_style,
                    'visible': table.visible
                })

                self.new_layers.append(geocoded_layer)
                i += 1
        return i