Example #1
0
def generator(z, is_training=True, reuse=False):
    """Generator network mapping noise `z` to 28x28x1 images in [0, 1].

    Network architecture is exactly the same as in infoGAN
    (https://arxiv.org/abs/1606.03657).

    Args:
        z: noise tensor, shape [batch, z_dim].
        is_training: batch-norm training/inference mode flag.
        reuse: whether to reuse variables in the "generator" scope.

    Returns:
        Tensor of generated images, shape [batch, 28, 28, 1].
    """
    with tf.variable_scope("generator", reuse=reuse):
        net = tf.nn.relu(
            bn(linear(z, 1024, scope='g_fc1'),
               is_training=is_training,
               scope='linear1'))
        net = tf.nn.relu(
            bn(linear(net, 128 * 7 * 7, scope='g_fc2'),
               is_training=is_training,
               scope='linear2'))
        net = tf.reshape(net, [-1, 7, 7, 128])
        net = deconv2d(net,
                       output_size=14,
                       output_channel=64,
                       kernel=(4, 4),
                       stride=(2, 2),
                       activation='relu',
                       use_bn=True,
                       # BUG FIX: was hard-coded to True, so this layer's batch
                       # norm ignored the caller's is_training flag at eval time.
                       is_training=is_training,
                       name='d_conv1')
        out = deconv2d(net,
                       output_size=28,
                       output_channel=1,
                       kernel=(4, 4),
                       stride=(2, 2),
                       activation='sigmoid',
                       name='gen_images')
        return out
Example #2
0
    def __call__(self, x, y=None, sn=False, is_training=True, reuse=False):
        """Fully-connected discriminator, optionally conditioned on labels y.

        Returns (logit, features), plus class logits as a third element
        when self.class_num is set.
        """
        with tf.variable_scope(self.name, reuse=reuse):
            bz = x.get_shape().as_list()[0]
            if y is not None:
                label_dim = y.get_shape().as_list()[-1]
                y_map = tf.reshape(y, [bz, 1, 1, label_dim])
                x = conv_cond_concat(x, y_map)  # [bz, 28, 28, 11]

            h = tf.reshape(x, (bz, -1))
            h = lrelu(dense(h, 512, sn=sn, name='d_fc1'), name='d_l1')
            h = lrelu(bn(dense(h, 256, sn=sn, name='d_fc2'),
                         is_training,
                         name='d_bn2'),
                      name='d_l2')
            h = lrelu(bn(dense(h, 128, sn=sn, name='d_fc3'),
                         is_training,
                         name='d_bn3'),
                      name='d_l3')
            yd = dense(h, 1, sn=sn, name="D_dense")

            if not self.class_num:
                return yd, h
            yc = dense(h, self.class_num, sn=sn, name='C_dense')
            return yd, h, yc
Example #3
0
def routePacket(data, dest):
    """Relay a packet toward `dest` if its TTL has not expired.

    Decodes and decrements the TTL byte (data[4]); when the TTL is still
    positive and `dest` is a known peer, rewrites the TTL field in place
    and forwards the packet. Otherwise the packet is silently dropped.
    """
    ttl = bn(data[4]) - 1  # decode current TTL and decrement it
    if ttl <= 0:
        return  # TTL exhausted: drop the packet
    data[4] = nb(ttl, 1)  # re-encode the decremented TTL into the header
    if dest in knownTable:
        pckNum = bn(data[1:3])  # packet sequence number from the header
        sendPacket(data, knownTable[dest]['IP'], knownTable[dest]['PORT'], pckNum)
Example #4
0
def parseChunking(data):
    """Reassemble a chunked message; return it when complete, else None.

    Packet layout (per this protocol): chunk key at data[42:45], chunk
    sequence number at data[45:53], payload at data[53:], and
    data[41] == '\x01' marks the final chunk of a message.
    """
    key = data[42:45]  # hoisted: the original sliced this repeatedly
    if key in chunkingHistory:
        entry = chunkingHistory[key]
        # Only accept the next chunk in sequence; anything else is ignored
        # (duplicates / out-of-order chunks leave the record untouched).
        if entry['chunkId'] + 1 == bn(data[45:53]):
            entry['chunkId'] += 1
            entry['message'] += data[53:]
            if data[41] == '\x01':  # final-chunk marker: message complete
                result = entry['message']
                del chunkingHistory[key]
                return result
    else:
        # First chunk seen for this key: start a new reassembly record.
        chunkingHistory[key] = {
            'message': data[53:],
            'chunkId': bn(data[45:53])
        }
    return None  # message not complete yet (was a dead trailing `pass`)
Example #5
0
def discriminator(x, is_training=True, reuse=False):
    """Discriminator network for 28x28 images.

    Network architecture is exactly the same as in infoGAN
    (https://arxiv.org/abs/1606.03657).

    Returns:
        (out, out_logit, features): sigmoid score, raw logit, and the
        penultimate 1024-d feature layer.
    """
    bz = x.get_shape().as_list()[0]
    with tf.variable_scope("discriminator", reuse=reuse):
        h = conv2d(x,
                   output_dim=64,
                   kernel=(4, 4),
                   stride=(2, 2),
                   activation='lrelu',
                   name='conv1')
        h = conv2d(h,
                   output_dim=128,
                   kernel=(4, 4),
                   stride=(2, 2),
                   activation='lrelu',
                   use_bn=True,
                   is_training=is_training,
                   name='conv2')
        h = tf.reshape(h, [bz, -1])
        h = lrelu(
            bn(linear(h, 1024, scope='d_fc3'),
               is_training=is_training,
               scope='linear1'))
        out_logit = linear(h, 1, scope='linear2')
        return tf.nn.sigmoid(out_logit), out_logit, h
Example #6
0
    def __call__(self, z, y=None, is_training=True, reuse=False):
        """Fully-connected generator: noise (+ optional labels) -> 28x28x1 images."""
        with tf.variable_scope(self.name, reuse=reuse):
            bz = z.get_shape().as_list()[0]
            if y is not None:
                z = tf.concat([z, y], 1)

            h = z
            # Four batch-normalized ReLU layers of increasing width.
            for idx, width in enumerate((128, 256, 512, 1024), start=1):
                h = tf.nn.relu(
                    bn(dense(h, width, name='g_fc%d' % idx),
                       is_training,
                       name='g_bn%d' % idx))
            h = tf.nn.sigmoid(dense(h, 784, name='g_fc5'))
            return tf.reshape(h, (bz, 28, 28, 1))
Example #7
0
 def __call__(self, x, y=None, sn=False, is_training=True, reuse=False):
     """Convolutional discriminator, optionally conditioned on labels y.

     Returns (logit, features), plus class logits as a third element
     when self.class_num is set.
     """
     with tf.variable_scope(self.name, reuse=reuse):
         bz = x.get_shape().as_list()[0]
         if y is not None:
             label_dim = y.get_shape().as_list()[-1]
             y_map = tf.reshape(y, [bz, 1, 1, label_dim])
             x = conv_cond_concat(x, y_map)  # [bz, 28, 28, 11]
         # [bz, 14, 14, 64]
         h = lrelu(conv2d(x, 64, 4, 4, 2, 2,
                          sn=sn, padding="SAME", name='d_conv1'),
                   name='d_l1')
         # [bz, 7, 7, 128]
         h = lrelu(bn(conv2d(h, 128, 4, 4, 2, 2,
                             sn=sn, padding="SAME", name='d_conv2'),
                      is_training, name='d_bn2'),
                   name='d_l2')
         h = tf.reshape(h, [bz, 7 * 7 * 128])
         # [bz, 1024]
         h = lrelu(bn(dense(h, 1024, sn=sn, name='d_fc3'),
                      is_training, name='d_bn3'),
                   name='d_l3')
         # [bz, 1]
         yd = dense(h, 1, sn=sn, name='D_dense')
         if not self.class_num:
             return yd, h
         yc = dense(h, self.class_num, sn=sn, name='C_dense')
         return yd, h, yc
Example #8
0
    def __call__(self, x, is_training=True, reuse=False):
        """Classifier head: one hidden lrelu+bn layer, then class logits.

        Returns:
            (out_logit, out): raw class logits and their softmax.
        """
        # BUG FIX: `reuse` was passed positionally, landing in the
        # `default_name` parameter of tf.variable_scope and being silently
        # ignored, so variables were never reused on subsequent calls.
        with tf.variable_scope(self.name, reuse=reuse):
            net = lrelu(bn(dense(x, 64, name='c_fc1'),
                           is_training,
                           name='c_bn1'),
                        name='c_l1')
            out_logit = dense(net, self.class_num, name='c_l2')
            out = tf.nn.softmax(out_logit)

            return out_logit, out
Example #9
0
    def __call__(self, z, y=None, is_training=True, reuse=False):
        """DCGAN-style generator: noise (+ optional labels) -> 28x28x1 images."""
        with tf.variable_scope(self.name, reuse=reuse):
            bz = z.get_shape().as_list()[0]
            if y is not None:
                z = tf.concat([z, y], 1)  # [bz,zdim+10]

            h = tf.nn.relu(
                bn(dense(z, 1024, name='g_fc1'), is_training, name='g_bn1'))
            h = tf.nn.relu(
                bn(dense(h, 128 * 7 * 7, name='g_fc2'),
                   is_training,
                   name='g_bn2'))
            h = tf.reshape(h, [bz, 7, 7, 128])
            # [bz, 14, 14, 64]
            h = tf.nn.relu(
                bn(deconv2d(h, 64, 4, 4, 2, 2, padding='SAME', name='g_dc3'),
                   is_training,
                   name='g_bn3'))
            # [bz, 28, 28, 1]
            return tf.nn.sigmoid(
                deconv2d(h, 1, 4, 4, 2, 2, padding='SAME', name='g_dc4'))
Example #10
0
def updateRoutingTable(message, source):
    """Apply a routing update received from `source` to the global table.

    An empty message (or a single NUL byte) means the neighbor went away:
    drop its entries. Otherwise the message is a list of fixed 17-byte
    records (16-byte destination address + 1 cost byte); all routes
    previously learned from `source` are replaced wholesale.
    """
    global updateTable, routingTable
    if len(message) == 0 or message == '\x00':
        # Neighbor disconnected: remove entries that are both via and to it.
        # NOTE(review): `or` keeps entries where only one of nextHop /
        # destination equals source — confirm this is the intended filter.
        routingTable[:] = [d for d in routingTable
                           if d.get('nextHop') != source or d.get('destination') != source]
    else:
        # Floor division (was `/`): records are fixed 17-byte units.
        numberOfRoutes = len(message) // 17
        # Drop everything previously learned from this neighbor before re-adding.
        routingTable[:] = [d for d in routingTable if d.get('nextHop') != source]
        for x in xrange(numberOfRoutes):
            route = message[17 * x : 17 * (x + 1)]
            cost = bn(route[16])
            email = route[:16]
            if email != sourceAddress:  # never add a route to ourselves
                routingTable.append({
                    'destination': email,
                    'nextHop': source,
                    'metric': cost + 1  # distance-vector: one extra hop via source
                })
    # BUG FIX: updateTable was previously only set inside the per-route loop,
    # so a non-empty message decoding to zero routes left it unset even though
    # routes from `source` had already been purged above.
    updateTable = True
    retrieveRoutingTable()
Example #11
0
def parseMessage():
    """Receive-loop for the UDP socket: dispatch data packets and ACKs.

    Runs forever, reading up to 100-byte datagrams from the module-level
    `sock`. Packet layout (as read by this function): byte 3 is the message
    type ('\x02' data, '\x04' ACK), bytes 8:24 the source address, bytes
    24:40 the destination address, and byte 40 a payload subtype selecting
    routing-table vs chat/file handling, chunked vs whole, and whether the
    payload is encrypted.
    """
    while True:
        data, addr = sock.recvfrom(100)
        # MESSAGE TYPE 02 = Data, 04 = ACK
        if data[3] == '\x02':
            source = data[8:24]
            destination = data[24:40]

            # Not addressed to us: forward it toward its destination.
            # (Note: processing below still runs — presumably intentional
            # so intermediate nodes also learn routing updates; confirm.)
            if sourceAddress != destination:
                routePacket(data, destination)

            # Routing-table payloads: \x05/\x0D chunked, \x01/\x09 whole;
            # \x0D/\x09 variants are encrypted.
            if data[40] in ['\x05', '\x0D', '\x01', '\x09']:
                message = ''
                if data[40] in ['\x05', '\x0D']:
                    message = parseChunking(data)
                else:
                    message = data[41:]

                sendAck(data[1:3], source, addr[0], addr[1])
                if message:
                    sendBack = False
                    # First routing update from this node: register it as a
                    # new neighbor and remember to reply with our own table.
                    if not any(d['nextHop'] == source for d in routingTable):
                        msg_list.insert(END, "Neighbor " + knownTable[source]['email'] + " connected")
                        neighbors.append(addr[0] + ":"+ str(addr[1]) + ":" + knownTable[source]['email'])
                        sendBack = True

                    # Empty/NUL table means the neighbor is leaving.
                    if len(message) == 0 or message == '\x00':
                        neighborToRemove = addr[0] + ":" + str(addr[1]) + ":" + knownTable[source]['email']
                        idx = neighbors.index(neighborToRemove)
                        del neighbors[idx]

                    if data[40] in ['\x0D', '\x09']: message = Encryption.decrypt(message)
                    updateRoutingTable(message, source)
                    if sendBack:
                        sendRoutingTable(addr[0], addr[1], source)
                        

            # Chat/file payloads: \x06/\x0E chunked, \x02/\x0A whole;
            # \x0E/\x0A variants are encrypted.
            elif data[40] in ['\x06', '\x0E', '\x02', '\x0A']:
                message = None
                if data[40] in ['\x06', '\x0E']:
                    message = parseChunking(data)
                else:
                    message = data[41:]

                sendAck(data[1:3], source, addr[0], addr[1])   
                if message is not None:

                    if data[40] in ['\x0E', '\x0A']: message = Encryption.decrypt(message)

                    # First payload byte distinguishes file ('\x02') from chat.
                    messageType = message[0]
                    message = message[1:]
                    
                    # File transfer: dump payload to disk, skip chat display.
                    if messageType == '\x02':
                        f = open('output.file', 'wb')
                        f.write(message)
                        f.close()
                        continue

                    from_email = knownTable[source]['email']
                    
                    msg_list.insert(END, from_email + ': ' + message)

        elif data[3] == '\x04':
            # ACK: clear the matching entry from the retransmission history.
            dest_addr = addr[0] + ":" + str(addr[1])
            packetConfId = bn(data[5:7])
            if dest_addr in ackHistory and packetConfId in ackHistory[dest_addr]:
                 del ackHistory[dest_addr][packetConfId]
    pass