Example #1
 def __init__(self, layers):
     '''
     Initialize a fully connected (FC) neural network.
     :param layers: array recording the number of nodes in each layer;
     len(layers) = number of layers in the NN,
     layers[i] = the number of nodes in the ith layer
     '''
     self.connections = Connections()
     self.layers = []
     layer_count = len(layers)  # number of layers
     node_count = 0
     for i in range(layer_count):
         self.layers.append(Layer(
             i, layers[i]))  # layers[i] is the number of nodes in the ith layer
     for layer in range(layer_count - 1):  # connect each layer to the next
         connections = [
             Connection(upstream_node, downstream_node)
             for upstream_node in self.layers[layer].nodes
             # skip the trailing ConstNode (bias) in the downstream layer
             for downstream_node in self.layers[layer + 1].nodes[:-1]
         ]
         for conn in connections:
             self.connections.add_connection(conn)
             conn.downstream_node.append_upstream_connection(conn)
             conn.upstream_node.append_downstream_connection(conn)
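As a quick check, the constructor above can be exercised like this. This is a hypothetical sketch: it assumes this __init__ belongs to the Network class (shown in full in example #22) and that the companion Layer, Node and Connection classes from the same tutorial are importable.

# Hypothetical construction sketch; Layer/Connection/Connections are assumed
# to come from the same tutorial module this __init__ belongs to.
net = Network([2, 4, 1])   # 2 input nodes, 4 hidden nodes, 1 output node
print(len(net.layers))     # 3 layers were created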
Example #2
    def __init__(self):

        # Init basic objects
        self.cropper = Cropper()
        self.extractor = Extractor(self.cropper)
        self.classifier = Classifier(self.extractor.images)
        self.connections = Connections(self.extractor, self.classifier)
        self.visualization = Visualization(self.connections)
Example #3
    def testCreateSegment(self):
        connections = Connections(1024, [0, 1024, 0, 1024])

        segment1 = connections.createSegment(10)
        self.assertEqual(segment1.cell, 10)

        segment2 = connections.createSegment(10)
        self.assertEqual(segment2.cell, 10)

        self.assertEqual([segment1, segment2],
                         list(connections.segmentsForCell(10)))
Example #4
import numpy as np


class Point:
    def __init__(self, name, x=0, y=0, constraint='none', load=np.zeros(2)):
        self.location = Location(x, y)
        self.connections = Connections(self)
        self.name = name
        self.constraint = constraint
        self.load = load

    def __getitem__(self, item):
        if item == 'x':
            return self.location.x
        if item == 'y':
            return self.location.y
        if item == 'xy':
            return [self.location.x, self.location.y]

    def direction_array(self):
        return np.array(self['xy'])

    def connect(self, point, first):
        self.connections.connect(point, first)

    def disconnect(self, point, first):
        self.connections.disconnect(point, first)

    def set_location(self, x=0, y=0):
        self.location.set_location('x', x)
        self.location.set_location('y', y)

    def __repr__(self):
        return "[Location: {}, Connections: {}, Constraint: {}, Load: {}]".format(
            self.location, self.connections, self.constraint, self.load)

    def __lt__(self, other):
        return self.name < other.name

    def __gt__(self, other):
        return self.name > other.name

    def set_constraint(self, constraint):
        self.constraint = constraint

    def set_load(self, load):
        self.load = load
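A short usage sketch for the Point class above (hypothetical; it assumes the Location and Connections helpers behave as the constructor implies):

# Hypothetical usage of the Point class defined above.
a = Point('A', x=0, y=0, constraint='pin')
b = Point('B', x=3, y=4, load=np.array([0.0, -10.0]))
a.connect(b, first=True)            # delegates to the Connections helper
print(a['xy'], b.direction_array())
print(sorted([b, a]))               # __lt__/__gt__ order points by name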
Example #5
    def __init__(self, layers):
        """
		初始化一个全连接神经网络
		:param layers: 二维数组,描述神经网络每层节点数
		"""
        self.connections = Connections()
        self.layers = []
        layers_count = len(layers)
        node_count = 0
        for i in range(layers_count):  # one Layer per entry in layers
            self.layers.append(Layer(i, layers[i]))
        for layer in range(layers_count - 1):
            connections = [
                Connection(upstream_node, downstream_node)
                for upstream_node in self.layers[layer].nodes
                for downstream_node in self.layers[layer + 1].nodes[:-1]
            ]
            for conn in connections:
                self.connections.add_connection(conn)
                conn.downstream_node.append_upstream_connection(conn)
                conn.upstream_node.append_downstream_connection(conn)
Example #6
import json  # needed by acl_policy below


class CloudTrail:

    def __init__(self):
        """CloudTrail Constructor"""
        self.aws_conn = Connections()
        self.ec2c = self.aws_conn.ec2_connection()

    def acl_policy(self, buck_name):
        """policy to allow Cloud Trail logs to be sent to a bucket"""
        # to retrieve the account ID
        groups = self.ec2c.get_all_security_groups()
        account_id = groups[0].owner_id
        policy = {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Sid": "AWSCloudTrailAclCheck20150319",
                    "Effect": "Allow",
                    "Principal": {
                        "Service": "cloudtrail.amazonaws.com"
                    },
                    "Action": "s3:GetBucketAcl",
                    "Resource": "arn:aws:s3:::%s" % buck_name
                },
                {
                    "Sid": "AWSCloudTrailWrite20150319",
                    "Effect": "Allow",
                    "Principal": {
                        "Service": "cloudtrail.amazonaws.com"
                    },
                    "Action": "s3:PutObject",
                    "Resource": "arn:aws:s3:::%s/AWSLogs/%s/*" % (buck_name, account_id),
                    "Condition": {
                        "StringEquals": {
                            "s3:x-amz-acl": "bucket-owner-full-control"
                        }
                    }
                }
            ]
        }
        return json.JSONEncoder().encode(policy)

    def create_trail(self, conn, bucket_name, trail_name):
        """ Creates and enables a trail that specifies the settings
        for delivery of log data to an Amazon S3 bucket """
        trail = conn.create_trail(name=trail_name, s3_bucket_name=bucket_name)
        if trail:
            conn.start_logging(trail_name)
            print('CloudTrail Created')
        else:
            print('unable to create trail')
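A hedged usage sketch for the class above. The bucket and trail names are placeholders, and ct_connection() is assumed to be exposed by the same Connections wrapper (as in example #12 below):

# Hypothetical usage; names are made up for illustration.
trail = CloudTrail()
policy = trail.acl_policy('my-cloudtrail-logs')        # JSON bucket policy for CloudTrail
ct_conn = trail.aws_conn.ct_connection()               # assumed CloudTrail connection helper
trail.create_trail(ct_conn, 'my-cloudtrail-logs', 'my-trail')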
Example #7
    def testUpdateSynapsePermanence(self):
        """ Creates a synapse and updates its permanence, and makes sure that its
        data was correctly updated.
    """
        connections = Connections(1024, [0, 1024, 0, 1024])
        segment = connections.createSegment(10)
        synapse = connections.createSynapse(segment, 50, .34)

        connections.updateSynapsePermanence(synapse, .21)

        synapseData = connections.dataForSynapse(synapse)
        self.assertAlmostEqual(synapseData.permanence, .21)
Example #8
 def __init__(self, layers):
     '''
     Initialize a fully connected neural network.
     :param layers: array describing the number of nodes in each layer
     '''
     self.connections = Connections()
     self.layers = []
     layer_count = len(layers)
     node_count = 0
     # Append a Layer object for each entry in layers
     for i in range(layer_count):
         self.layers.append(Layer(i, layers[i]))
     # Build the connections between adjacent layers and register them on their nodes
     for layer in range(layer_count - 1):
         connections = [
             Connection(upstream_node, downstream_node)
             for upstream_node in self.layers[layer].nodes
             for downstream_node in self.layers[layer + 1].nodes[:-1]
         ]
         for conn in connections:
             self.connections.add_connection(conn)
             conn.downstream_node.append_upstream_connection(conn)
             conn.upstream_node.append_downstream_connection(conn)
Example #9
 def main(self):
     auth.auth(self.b, self.selector)
     cons = Conss()
     # if cons.toBeMessaged():
     #     print("the file is empty so we are going to get new connecitons")
     #     cons.getConnection(self.b,self.selector,self.soup)
     message_file = open(credentials.MESSAGE_FILE_PATH, "r")
     message = message_file.read()
     # lastPresent = False
     # last = open(credentials.LAST_CONNECTION_SENT_MESSAGE_FILE_PATH,"r")
     # if last == "":
     #     lastPresent = True
     # print=(last)
     for connection in cons.loadConnections():
         # print(f"{connection.name} {connection.profile}")
         # if not lastPresent:
         #     if f"{connection.name} {connection.profile}" == last:
         #         lastPresent = True
         #     else:
         #         continue
         # print("sending connection message")
         connection.sendMessage(self.b, self.selector, message, webdriver)
     message_file.close()
Example #10
File: NetWork.py  Project: liguoyu1/python
	def __init__(self, layers):
		"""
		初始化一个全连接神经网络
		:param layers: 二维数组,描述神经网络每层节点数
		"""
		self.connections = Connections()
		self.layers = []
		layers_count = len(layers)
		node_count = 0
		for i in range(layers_count):  # one Layer per entry in layers
			self.layers.append(Layer(i,layers[i]))
		for layer in range(layers_count-1):
			connections = [Connection(upstream_node, downstream_node)
			               for upstream_node in self.layers[layer].nodes
			               for downstream_node in self.layers[layer + 1].nodes[:-1]]
			for conn in connections:
				self.connections.add_connection(conn)
				conn.downstream_node.append_upstream_connection(conn)
				conn.upstream_node.append_downstream_connection(conn)
Example #11
    def testSynapseReuse(self):
        """ Creates a synapse over the synapses per segment limit, and verifies
        that the lowest permanence synapse is removed to make room for the new
        synapse.
    """
        connections = Connections(1024, [0, 1024, 0, 1024], 1024, 2)
        segment = connections.createSegment(10)

        synapse1 = connections.createSynapse(segment, 50, .34)
        synapse2 = connections.createSynapse(segment, 51, .48)

        synapses = connections.synapsesForSegment(segment)
        self.assertEqual(set([synapse1, synapse2]), synapses)

        # Add an additional synapse to force it over the limit of num synapses
        # per segment.
        connections.createSynapse(segment, 52, .52)

        # Ensure lower permanence synapse was removed.
        self.assertEqual(
            set([51, 52]),
            set(synapse.presynapticCell
                for synapse in connections.synapsesForSegment(segment)))
Example #12
    def __init__(self):
        aws_conn = Connections()
        self.cTrail_conn = aws_conn.ct_connection()
        self.ec2_conn = aws_conn.ec2_connection()
        self.cW_conn = aws_conn.cw_connection()
        self.autoS_conn = aws_conn.as_connection()
        self.s3_conn = aws_conn.s3_connection()

        # AWS Services
        self.ec2 = AWS_Compute()
        self.s3service = aws_S3()
        self.cW = CloudWatch()
        self.autoS = AutoScale()
        self.cTrail = CloudTrail()

        # OpenStack
        OSConn = OSConnections()
        self.openS = OpenStack()
        self.compute_driver = OSConn.os_compute_conn()
        self.storage_driver = OSConn.os_storage_conn()
Example #13
    def testDestroySegmentsThenReachLimit(self):
        """ Destroy some segments then verify that the maxSegmentsPerCell is still
        correctly applied.
    """
        connections = Connections(1024, [0, 1024, 0, 1024], 2, 2)

        segment1 = connections.createSegment(11)
        segment2 = connections.createSegment(11)

        self.assertEqual(2, connections.numSegments())
        connections.destroySegment(segment1)
        connections.destroySegment(segment2)
        self.assertEqual(0, connections.numSegments())

        connections.createSegment(11)
        self.assertEqual(1, connections.numSegments())
        connections.createSegment(11)
        self.assertEqual(2, connections.numSegments())
        segment3 = connections.createSegment(11)
        self.assertEqual(2, connections.numSegments(11))
        self.assertEqual(2, connections.numSegments())
Example #14
    def testDestroySynapsesThenReachLimit(self):
        """ Destroy some synapses then verify that the maxSynapsesPerSegment is
        still correctly applied.
    """
        connections = Connections(1024, [0, 1024, 0, 1024], 2, 2)

        segment = connections.createSegment(10)

        synapse1 = connections.createSynapse(segment, 201, .85)
        synapse2 = connections.createSynapse(segment, 202, .85)

        self.assertEqual(2, connections.numSynapses())
        connections.destroySynapse(synapse1)
        connections.destroySynapse(synapse2)
        self.assertEqual(0, connections.numSynapses())

        connections.createSynapse(segment, 201, .85)
        self.assertEqual(1, connections.numSynapses())
        connections.createSynapse(segment, 202, .90)
        self.assertEqual(2, connections.numSynapses())
        synapse3 = connections.createSynapse(segment, 203, .8)
        self.assertEqual(2, connections.numSynapses())
Example #15
class Network(object):
    def __init__(self, layers):
        '''
        Initialize a fully connected neural network.
        :param layers: array describing the number of nodes in each layer
        '''
        self.connections = Connections()
        self.layers = []
        layer_count = len(layers)
        node_count = 0
        # Append a Layer object for each entry in layers
        for i in range(layer_count):
            self.layers.append(Layer(i, layers[i]))
        # Build the connections between adjacent layers and register them on their nodes
        for layer in range(layer_count - 1):
            connections = [
                Connection(upstream_node, downstream_node)
                for upstream_node in self.layers[layer].nodes
                for downstream_node in self.layers[layer + 1].nodes[:-1]
            ]
            for conn in connections:
                self.connections.add_connection(conn)
                conn.downstream_node.append_upstream_connection(conn)
                conn.upstream_node.append_downstream_connection(conn)

    def train(self, labels, data_set, rate, iteration):
        '''
        Train the neural network.
        :param labels: array of training labels; each element is the label of one sample
        :param data_set: 2-D array of training features; each element is one sample's feature vector
        :param rate: learning rate
        :param iteration: number of training passes over the data set
        :return:
        '''
        for i in range(iteration):
            print(len(data_set), len(labels))
            for d in range(len(data_set)):
                self.train_one_sample(labels[d], data_set[d], rate)

    def train_one_sample(self, label, sample, rate):
        '''
        Internal helper: train the network on one sample.
        :param label:
        :param sample:
        :param rate:
        :return:
        '''
        self.predict(sample)
        self.calc_delta(label)
        self.update_weight(rate)

    def calc_delta(self, label):
        '''
        Internal helper: compute the delta of every node.
        :param label:
        :return:
        '''
        output_nodes = self.layers[-1].nodes
        for i in range(len(label)):
            output_nodes[i].calc_output_layer_delta(label[i])
        for layer in self.layers[-2::-1]:
            for node in layer.nodes:
                node.calc_hidden_layer_delta()

    def update_weight(self, rate):
        '''
        Internal helper: update the weight of every connection.
        :param rate:
        :return:
        '''
        for layer in self.layers[:-1]:
            for node in layer.nodes:
                for conn in node.downstream:
                    conn.update_weight(rate)

    def calc_gradient(self):
        '''
        Internal helper: compute the gradient of every connection.
        :return:
        '''
        for layer in self.layers[:-1]:
            for node in layer.nodes:
                for conn in node.downstream:
                    conn.calc_gradient()

    def get_gradient(self, label, sample):
        '''
        Get the gradient on every connection for one sample.
        :param label:
        :param sample:
        :return:
        '''
        self.predict(sample)
        self.calc_delta(label)
        self.calc_gradient()

    def predict(self, sample):
        '''
        Predict the output for an input sample.
        :param sample: array of sample features, i.e. the network's input vector
        :return:
        '''
        self.layers[0].set_output(sample)
        for i in range(1, len(self.layers)):
            self.layers[i].calc_output()
        return map(lambda node: node.output, self.layers[-1].nodes[:-1])  # skip the trailing ConstNode

    def dump(self):
        '''
        Print the network's information.
        :return:
        '''
        for layer in self.layers:
            layer.dump()
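A minimal training sketch for the class above (hypothetical data; it assumes the supporting Layer, Node and Connection classes from the same tutorial, and one-hot labels whose length matches the output layer):

# Hypothetical training run; the data set is made up for illustration.
net = Network([2, 4, 2])                      # 2 inputs, 4 hidden nodes, 2 outputs
data_set = [[0.0, 1.0], [1.0, 0.0]]           # two samples
labels = [[0.0, 1.0], [1.0, 0.0]]             # one-hot targets, one per sample
net.train(labels, data_set, rate=0.3, iteration=100)
print(list(net.predict([0.0, 1.0])))          # list() in case predict returns an iterator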
Example #16
    def testReachSegmentLimitMultipleTimes(self):
        """ Hit the maxSynapsesPerSegment threshold multiple times. Make sure it
        works more than once.
    """
        connections = Connections(1024, [0, 1024, 0, 1024], 2, 2)

        segment = connections.createSegment(10)
        connections.createSynapse(segment, 201, .85)
        self.assertEqual(1, connections.numSynapses())
        connections.createSynapse(segment, 202, .9)
        self.assertEqual(2, connections.numSynapses())
        connections.createSynapse(segment, 203, .8)
        self.assertEqual(2, connections.numSynapses())
        synapse = connections.createSynapse(segment, 204, .8)
        self.assertEqual(2, connections.numSynapses())
Example #17
    def testDestroySegment(self):
        """ Creates a segment, destroys it, and makes sure it got destroyed along
        with all of its synapses.
    """
        connections = Connections(1024, [0, 1024, 0, 1024])

        connections.createSegment(10)
        segment2 = connections.createSegment(20)
        connections.createSegment(30)
        connections.createSegment(40)

        connections.createSynapse(segment2, 80, 0.85)
        connections.createSynapse(segment2, 81, 0.85)
        connections.createSynapse(segment2, 82, 0.15)

        self.assertEqual(4, connections.numSegments())
        self.assertEqual(3, connections.numSynapses())

        connections.destroySegment(segment2)

        self.assertEqual(3, connections.numSegments())
        self.assertEqual(0, connections.numSynapses())

        (numActiveConnected,
         numActivePotential) = connections.computeActivity([80, 81, 82], 0.5)

        self.assertEqual(0, numActiveConnected[segment2.UID])
        self.assertEqual(0, numActivePotential[segment2.UID])
Example #18
class Controller:
    def __init__(self):

        # Init basic objects
        self.cropper = Cropper()
        self.extractor = Extractor(self.cropper)
        self.classifier = Classifier(self.extractor.images)
        self.connections = Connections(self.extractor, self.classifier)
        self.visualization = Visualization(self.connections)

    # Load connections from the disk
    def load_connections_from_disk(self):
        self.connections.load_connections_from_disk()

    # Generate connections
    def generate_connections(self):

        # Clean faces and clusters directory
        self.clean_clusters_directory()
        self.clean_faces_directory()

        # Crop faces
        self.cropper.crop_images_in_directory(DATA_PATH)

        # Extract features for each face
        self.extractor.extract_features_from_directory(FACES_PATH)

        # Remove images that do not represent a face (remove false positives)
        self.extractor.fix_mapping_dictionary()

        # Set the cropper images
        self.classifier.set_face_images_list(self.extractor.images)

        # Cluster the faces
        self.classifier.cluster()

        # Set the extractor and classifier after the clustering
        self.connections.set_extractor_classifier(self.extractor,
                                                  self.classifier)

        # Remove duplicate faces (remove false positives)
        self.connections.remove_duplicated_faces()

        # Generate the connections
        self.connections.generate_connections()

        # Set connections for the visualization object
        self.visualization.set_connections(self.connections)

    # Get clusters
    def get_results(self):
        return self.connections.get_clusters()

    # Get all personal pictures
    def get_all_personal_pictures(self, clust_num):
        return self.visualization.get_all_personal_pictures(clust_num)

    # Draw personal connections graph
    def draw_personal_graph(self, clust_num):
        self.visualization.draw_personal_graph(clust_num)

    # Get the connection's pictures
    def get_pictures_of_connection(self, cluster_1, cluster_2):
        return self.visualization.get_pictures_of_connection(
            cluster_1, cluster_2)

    # Clean clusters directory
    def clean_clusters_directory(self):
        for filename in os.listdir(CLUSTERS_PATH):
            file_path = os.path.join(CLUSTERS_PATH, filename)
            try:
                if os.path.isfile(file_path) or os.path.islink(file_path):
                    os.unlink(file_path)
                elif os.path.isdir(file_path):
                    shutil.rmtree(file_path)
            except Exception:
                pass

    # Clean faces directory
    def clean_faces_directory(self):
        for filename in os.listdir(FACES_PATH):
            file_path = os.path.join(FACES_PATH, filename)
            try:
                if os.path.isfile(file_path) or os.path.islink(file_path):
                    os.unlink(file_path)
                elif os.path.isdir(file_path):
                    shutil.rmtree(file_path)
            except Exception:
                pass

    # Count directories
    def count_directories(self, path):
        return len([
            name for name in os.listdir(path)
            if os.path.isdir(os.path.join(path, name))
        ])

    # Count files
    def count_files(self, path):
        return len([
            name for name in os.listdir(path)
            if os.path.isfile(os.path.join(path, name))
        ])
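End-to-end, the Controller above is typically driven like this (a sketch; it assumes DATA_PATH already contains the input photos and that the Cropper/Extractor/Classifier pipeline is configured as above):

# Hypothetical pipeline run using only methods defined on Controller above.
controller = Controller()
controller.generate_connections()            # crop, extract, cluster, build connections
clusters = controller.get_results()
pictures = controller.get_all_personal_pictures(clust_num=0)
controller.draw_personal_graph(clust_num=0)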
Example #19
    def testComputeActivity(self):
        """ Creates a sample set of connections, and makes sure that computing the
        activity for a collection of cells with no activity returns the right
        activity data.
    """
        connections = Connections(1024, [0, 1024, 0, 1024])

        # Cell with 1 segment.
        # Segment with:
        # - 1 connected synapse: active
        # - 2 matching synapses
        segment1a = connections.createSegment(10)
        connections.createSynapse(segment1a, 150, .85)
        connections.createSynapse(segment1a, 151, .15)

        # Cell with 1 segment.
        # Segment with:
        # - 2 connected synapses: 2 active
        # - 3 matching synapses: 3 active
        segment2a = connections.createSegment(20)
        connections.createSynapse(segment2a, 80, .85)
        connections.createSynapse(segment2a, 81, .85)
        synapse = connections.createSynapse(segment2a, 82, .85)
        connections.updateSynapsePermanence(synapse, .15)

        inputVec = [50, 52, 53, 80, 81, 82, 150, 151]
        (numActiveConnected,
         numActivePotential) = connections.computeActivity(inputVec, .5)

        self.assertEqual(1, numActiveConnected[segment1a.UID])
        self.assertEqual(2, numActivePotential[segment1a.UID])

        self.assertEqual(2, numActiveConnected[segment2a.UID])
        self.assertEqual(3, numActivePotential[segment2a.UID])
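Outside of the test, the same calls are typically combined into a single inference step: grow a segment and its synapses, then ask computeActivity which segments overlap the currently active cells. A hedged sketch using only the API exercised above:

# Hypothetical inference step built from the calls shown in the tests above.
connections = Connections(1024, [0, 1024, 0, 1024])
segment = connections.createSegment(10)
connections.createSynapse(segment, 80, .85)   # connected synapse (permanence >= 0.5)
connections.createSynapse(segment, 81, .15)   # potential synapse only

active_cells = [80, 81]
(numActiveConnected,
 numActivePotential) = connections.computeActivity(active_cells, .5)
print(numActiveConnected[segment.UID], numActivePotential[segment.UID])   # expect 1 2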
Example #20
 def __init__(self, name, x=0, y=0, constraint='none', load=np.zeros(2)):
     self.location = Location(x, y)
     self.connections = Connections(self)
     self.name = name
     self.constraint = constraint
     self.load = load
Example #21
    def testReuseSegmentWithDestroyedSynapses(self):
        """ Destroy a segment that has a destroyed synapse and a non-destroyed
        synapse. Create a new segment in the same place. Make sure its synapse
        count is correct.
    """
        connections = Connections(1024, [0, 1024, 0, 1024])

        segment = connections.createSegment(11)

        synapse1 = connections.createSynapse(segment, 201, .85)
        connections.createSynapse(segment, 202, .85)

        connections.destroySynapse(synapse1)

        self.assertEqual(1, connections.numSynapses(segment))

        connections.destroySegment(segment)

        reincarnated = connections.createSegment(11)

        self.assertEqual(0, connections.numSynapses(reincarnated))
        self.assertEqual(0, len(connections.synapsesForSegment(reincarnated)))
Example #22
class Network(object):
    def __init__(self, layers):
        '''
        Initialize a fully connected (FC) neural network.
        :param layers: array recording the number of nodes in each layer;
        len(layers) = number of layers in the NN,
        layers[i] = the number of nodes in the ith layer
        '''
        self.connections = Connections()
        self.layers = []
        layer_count = len(layers)  # number of layers
        node_count = 0
        for i in range(layer_count):
            self.layers.append(Layer(
                i, layers[i]))  # layers[i] is the number of nodes in the ith layer
        for layer in range(layer_count - 1):  # connect each layer to the next
            connections = [
                Connection(upstream_node, downstream_node)
                for upstream_node in self.layers[layer].nodes
                # skip the trailing ConstNode (bias) in the downstream layer
                for downstream_node in self.layers[layer + 1].nodes[:-1]
            ]
            for conn in connections:
                self.connections.add_connection(conn)
                conn.downstream_node.append_upstream_connection(conn)
                conn.upstream_node.append_downstream_connection(conn)

    def train(self, labels, dataset, rate, iteration):
        '''
        train the NN
        :param labels: target labels of the dataset
        :param dataset: a matrix containing the features of each sample
        :param rate: learning rate
        :param iteration: number of training iterations
        :return:
        '''
        for i in range(iteration):
            for data in range(len(dataset)):
                self.train_one_sample(labels[data], dataset[data], rate)

    def train_one_sample(self, label, sample, rate):
        '''
        inner function to train the NN with one sample
        :param label: target label of one sample
        :param sample: feature vector of one sample
        :param rate: learning rate
        :return:
        '''
        self.predict(sample)
        self.calc_delta(label)
        self.update_weight(rate)

    def calc_delta(self, label):
        '''
        inner function to compute each node's delta
        using back propagation algorithm
        :param label: 
        :return: 
        '''
        # nodes in output layers
        output_nodes = self.layers[-1].nodes
        # calculate delta of output layer
        for i in range(len(label)):
            output_nodes[i].calc_output_layer_delta(label[i])
        # calculate delta of hidden layers
        for layer in self.layers[-2::-1]:  # from the second-to-last layer back to the first
            for node in layer.nodes:
                node.calc_hidden_layer_delta()

    def update_weight(self, rate):
        '''
        inner function to update weight of each connection
        :param rate: 
        :return: 
        '''
        for layer in self.layers[:-1]:  # except last(output) layer
            for node in layer.nodes:
                for conn in node.downstream:
                    conn.update_weight(rate)

    def calc_gradient(self):
        '''
        inner function to compute the gradient of each connection
        :return: 
        '''
        for layer in self.layers[:-1]:
            for node in layer.nodes:
                for conn in node.downstream:
                    conn.calc_gradient()

    def get_gradient(self, label, sample):
        '''
        obtain the gradient on each connection for a given sample and label
        :param label: 
        :param sample: 
        :return: 
        '''
        self.predict(sample)
        self.calc_delta(label)
        self.calc_gradient()

    def predict(self, sample):
        '''
        predict the output given the input sample data
        the result will be used to calculate delta and then update weight
        :param sample: 
        :return: 
        '''
        self.layers[0].set_output(sample)
        for i in range(1, len(self.layers)):
            self.layers[i].calc_output()
        # return the list of output (prediction) of output layer except ConstNode
        return map(lambda node: node.output,
                   self.layers[-1].nodes[:-1])  # the last node is ConstNode

    def dump(self):
        '''
        print info of NN
        :return: 
        '''
        for layer in self.layers:
            layer.dump()
Example #23
class NetWork(object):
    def __init__(self, layers):
        """
		初始化一个全连接神经网络
		:param layers: 二维数组,描述神经网络每层节点数
		"""
        self.connections = Connections()
        self.layers = []
        layers_count = len(layers)
        node_count = 0
        for i in range(layers_count):  # one Layer per entry in layers
            self.layers.append(Layer(i, layers[i]))
        for layer in range(layers_count - 1):
            connections = [
                Connection(upstream_node, downstream_node)
                for upstream_node in self.layers[layer].nodes
                for downstream_node in self.layers[layer + 1].nodes[:-1]
            ]
            for conn in connections:
                self.connections.add_connection(conn)
                conn.downstream_node.append_upstream_connection(conn)
                conn.upstream_node.append_downstream_connection(conn)

    def train(self, data_set, labels, rate, iteration):
        """
		训练神经网络
		:param data_set: 二维数组 训练样本的特征,每个元素是一个样本特征
		:param labels: 数组,训练样本标签。没个元素是一个样本的标签
		:param rate: 学习率
		:param iteration: 迭代次数
		:return:
		"""
        for i in range(iteration):
            for d in range(len(data_set)):
                self.train_one_sample(data_set[d], labels[d], rate)

    def train_one_sample(self, sample, label, rate):
        """
		内部函数
		一次一个样本训练网络
		:param sample:
		:param label:
		:param rate:
		:return:
		"""
        self.predict(sample)
        self.calc_delta(label)
        self.update_weight(rate)

    def calc_delta(self, label):
        """
		内部函数
		计算每个节点的delta
		:param label:
		:return:
		"""
        output_nodes = self.layers[-1].nodes
        for i in range(len(label)):
            output_nodes[i].calc_output_layer_delta(label[i])

        for layer in self.layers[-2::-1]:
            for node in layer.nodes:
                node.calc_hidden_layer_delta()

    def update_weight(self, rate):
        """
		内部函数,
		更新每个连接权重
		:param rate:
		:return:
		"""
        for layer in self.layers[:-1]:
            for node in layer.nodes:
                for conn in node.downstream:
                    conn.update_weight(rate)

    def calc_gradient(self):
        """
		内部函数
		计算每个连接的梯度
		:return:
		"""
        for layer in self.layers[:-1]:
            for node in layer.nodes:
                for conn in node.downstream:
                    conn.calc_gradient()

    def get_gradient(self, sample, label):
        """
		获得网络在一个样本下,每个连接上的梯度
		:param sample: 样本输入
		:param label: 样本标签
		:return:
		"""
        self.predict(sample)
        self.calc_delta(label)
        self.calc_gradient()

    def predict(self, sample):
        """
		根据输入样本预测输出值
		:param sample: 数组,样本的特征,也就是网络的输入向量
		:return:
		"""
        self.layers[0].set_output(sample)
        for i in range(1, len(self.layers)):
            self.layers[i].calc_output()
        return map(lambda node: node.output, self.layers[-1].nodes[:-1])

    def dump(self):
        """
		打印网络信息
		:return:
		"""
        for layer in self.layers:
            layer.dump()
Example #24
    def testCreateSegmentReuse(self):
        connections = Connections(1024, [0, 1024, 0, 1024], 2)

        segment1 = connections.createSegment(42)
        connections.createSynapse(segment1, 1, .5)
        connections.createSynapse(segment1, 2, .5)

        # Let some time pass.
        connections.startNewIteration()
        connections.startNewIteration()
        connections.startNewIteration()

        # Create a segment with 3 synapses.
        segment2 = connections.createSegment(42)
        connections.createSynapse(segment2, 1, .5)
        connections.createSynapse(segment2, 2, .5)
        connections.createSynapse(segment2, 3, .5)
        connections.startNewIteration()

        # Give the first segment some activity.
        connections.recordSegmentActivity(segment1)

        # Create a new segment with 1 synapse.
        segment3 = connections.createSegment(42)
        connections.createSynapse(segment3, 1, .5)

        segments = connections.segmentsForCell(42)
        self.assertEqual(2, len(segments))

        # Verify first segment is still there with the same synapses.
        self.assertEqual(
            set([1, 2]),
            set(synapse.presynapticCell
                for synapse in connections.synapsesForSegment(segments[0])))

        # Verify second segment has been replaced.
        self.assertEqual(
            set([1]),
            set(synapse.presynapticCell
                for synapse in connections.synapsesForSegment(segments[1])))

        # Verify the UIDs were properly reused.
        self.assertLess(segment1.UID, 2)
        self.assertLess(segment3.UID, 2)
        self.assertTrue(segment1 is connections.segmentForUID(segment1.UID))
        self.assertTrue(segment3 is connections.segmentForUID(segment3.UID))
Example #25
    def testWriteRead(self):
        c1 = Connections(1024, [0, 1024, 0, 1024])

        # Add data before serializing
        s1 = c1.createSegment(0)
        c1.createSynapse(s1, 254, 0.1173)

        s2 = c1.createSegment(100)
        c1.createSynapse(s2, 20, 0.3)

        c1.createSynapse(s1, 40, 0.3)

        s3 = c1.createSegment(0)
        c1.createSynapse(s3, 0, 0.5)
        c1.createSynapse(s3, 1, 0.5)

        s4 = c1.createSegment(10)
        c1.createSynapse(s4, 0, 0.5)
        c1.createSynapse(s4, 1, 0.5)
        c1.destroySegment(s4)

        proto1 = ConnectionsProto_capnp.ConnectionsProto.new_message()
        c1.write(proto1)

        # Write the proto to a temp file and read it back into a new proto
        with tempfile.TemporaryFile() as f:
            proto1.write(f)
            f.seek(0)
            proto2 = ConnectionsProto_capnp.ConnectionsProto.read(f)

        # Load the deserialized proto
        c2 = Connections.read(proto2)

        # Check that the two connections objects are functionally equal
        self.assertEqual(c1, c2)
Example #26
 def __init__(self):
     """CloudTrail Constructor"""
     self.aws_conn = Connections()
     self.ec2c = self.aws_conn.ec2_connection()
Example #27
 def __defaults__(self):
     self.tag = 'Energy Component'
     self.Connections = Connections()
Example #28
    def testPathsNotInvalidatedByOtherDestroys(self):
        """ Creates segments and synapses, then destroys segments and synapses on
        either side of them and verifies that existing Segment and Synapse
        instances still point to the same segment / synapse as before.
    """
        connections = Connections(1024, [0, 1024, 0, 1024])
        segment1 = connections.createSegment(11)
        connections.createSegment(12)
        segment3 = connections.createSegment(13)
        connections.createSegment(14)
        segment5 = connections.createSegment(15)

        synapse1 = connections.createSynapse(segment3, 201, .85)
        synapse2 = connections.createSynapse(segment3, 202, .85)
        synapse3 = connections.createSynapse(segment3, 203, .85)
        synapse4 = connections.createSynapse(segment3, 204, .85)
        synapse5 = connections.createSynapse(segment3, 205, .85)

        self.assertEqual(203, synapse3.presynapticCell)
        connections.destroySynapse(synapse1)
        self.assertEqual(203, synapse3.presynapticCell)
        connections.destroySynapse(synapse5)
        self.assertEqual(203, synapse3.presynapticCell)

        connections.destroySegment(segment1)
        self.assertEqual(set([synapse2, synapse3, synapse4]),
                         connections.synapsesForSegment(segment3))
        connections.destroySegment(segment5)
        self.assertEqual(set([synapse2, synapse3, synapse4]),
                         connections.synapsesForSegment(segment3))
        self.assertEqual(203, synapse3.presynapticCell)
Example #29
    def testDestroySynapse(self):
        """ Creates a segment, creates a number of synapses on it, destroys a
        synapse, and makes sure it got destroyed.
    """
        connections = Connections(1024, [0, 1024, 0, 1024])

        segment = connections.createSegment(20)
        synapse1 = connections.createSynapse(segment, 80, .85)
        synapse2 = connections.createSynapse(segment, 81, .85)
        synapse3 = connections.createSynapse(segment, 82, .15)

        self.assertEqual(3, connections.numSynapses())

        connections.destroySynapse(synapse2)

        self.assertEqual(2, connections.numSynapses())
        self.assertEqual({synapse1, synapse3},
                         connections.synapsesForSegment(segment))
        (numActiveConnected,
         numActivePotential) = connections.computeActivity([80, 81, 82], .5)

        self.assertEqual(1, numActiveConnected[segment.UID])
        self.assertEqual(2, numActivePotential[segment.UID])
Example #30
    def testDestroySegmentWithDestroyedSynapses(self):
        """ Destroy a segment that has a destroyed synapse and a non-destroyed
        synapse. Make sure nothing gets double-destroyed.
    """
        connections = Connections(1024, [0, 1024, 0, 1024])

        segment1 = connections.createSegment(11)
        segment2 = connections.createSegment(12)

        connections.createSynapse(segment1, 101, .85)
        synapse2a = connections.createSynapse(segment2, 201, .85)
        connections.createSynapse(segment2, 202, .85)

        self.assertEqual(3, connections.numSynapses())

        connections.destroySynapse(synapse2a)

        self.assertEqual(2, connections.numSegments())
        self.assertEqual(2, connections.numSynapses())

        connections.destroySegment(segment2)

        self.assertEqual(1, connections.numSegments())
        self.assertEqual(1, connections.numSynapses())
Example #31
# -*- coding: utf-8 -*-
from Connections import Connections
import re

dg = Connections('http://m.jizdnirady.cz/ConnMHD.aspx')
a = dg.upcoming_departures('Dejvická')

print(a)
Example #32
File: NetWork.py  Project: liguoyu1/python
class NetWork(object):
	def __init__(self, layers):
		"""
		初始化一个全连接神经网络
		:param layers: 二维数组,描述神经网络每层节点数
		"""
		self.connections = Connections()
		self.layers = []
		layers_count = len(layers)
		node_count = 0
		for i in range(layers_count):  # one Layer per entry in layers
			self.layers.append(Layer(i,layers[i]))
		for layer in range(layers_count-1):
			connections = [Connection(upstream_node, downstream_node)
			               for upstream_node in self.layers[layer].nodes
			               for downstream_node in self.layers[layer + 1].nodes[:-1]]
			for conn in connections:
				self.connections.add_connection(conn)
				conn.downstream_node.append_upstream_connection(conn)
				conn.upstream_node.append_downstream_connection(conn)

	def train(self, data_set, labels, rate, iteration):
		"""
		训练神经网络
		:param data_set: 二维数组 训练样本的特征,每个元素是一个样本特征
		:param labels: 数组,训练样本标签。没个元素是一个样本的标签
		:param rate: 学习率
		:param iteration: 迭代次数
		:return:
		"""
		for i in range(iteration):
			for d in range(len(data_set)):
				self.train_one_sample(data_set[d],labels[d],rate)

	def train_one_sample(self, sample, label, rate):
		"""
		内部函数
		一次一个样本训练网络
		:param sample:
		:param label:
		:param rate:
		:return:
		"""
		self.predict(sample)
		self.calc_delta(label)
		self.update_weight(rate)

	def calc_delta(self, label):
		"""
		内部函数
		计算每个节点的delta
		:param label:
		:return:
		"""
		output_nodes = self.layers[-1].nodes
		for i in range(len(label)):
			output_nodes[i].calc_output_layer_delta(label[i])

		for layer in self.layers[-2::-1]:
			for node in layer.nodes:
				node.calc_hidden_layer_delta()

	def update_weight(self, rate):
		"""
		内部函数,
		更新每个连接权重
		:param rate:
		:return:
		"""
		for layer in self.layers[:-1]:
			for node in layer.nodes:
				for conn in node.downstream:
					conn.update_weight(rate)

	def calc_gradient(self):
		"""
		内部函数
		计算每个连接的梯度
		:return:
		"""
		for layer in self.layers[:-1]:
			for node in layer.nodes:
				for conn in node.downstream:
					conn.calc_gradient()

	def get_gradient(self, sample, label):
		"""
		获得网络在一个样本下,每个连接上的梯度
		:param sample: 样本输入
		:param label: 样本标签
		:return:
		"""
		self.predict(sample)
		self.calc_delta(label)
		self.calc_gradient()

	def predict(self, sample):
		"""
		根据输入样本预测输出值
		:param sample: 数组,样本的特征,也就是网络的输入向量
		:return:
		"""
		self.layers[0].set_output(sample)
		for i in range(1, len(self.layers)):
			self.layers[i].calc_output()
		return map(lambda node: node.output, self.layers[-1].nodes[:-1])

	def dump(self):
		"""
		打印网络信息
		:return:
		"""
		for layer in self.layers:
			layer.dump()