Example #1
def parse_layer(to_parse):
    """
    Takes a given scapy layer object and returns a Geneva Layer object.
    """
    for layer in SUPPORTED_LAYERS:
        if layer.name_matches(to_parse.name):
            return layer(to_parse)
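A minimal usage sketch (hypothetical; it assumes SUPPORTED_LAYERS contains a Geneva layer whose name_matches accepts "IP"):

from scapy.layers.inet import IP

pkt = IP()
geneva_layer = parse_layer(pkt)  # wraps pkt in the matching Geneva Layer, or returns None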
Example #2
def gen_random():
    """
    Generates a possible random protocol, field, and value.
    """
    # layer is a Geneva Layer class - to instantiate it, we must give it a layer
    # to use. Every Geneva Layer stores the underlying scapy layer it wraps,
    # so simply invoke that as a default.
    layer = random.choice(SUPPORTED_LAYERS)
    layer_obj = layer(layer.protocol())
    field, value = layer_obj.gen_random()
    return layer.protocol, field, value
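A hypothetical call, showing the shape of the returned triple; per the comment above, protocol is the wrapped scapy class:

protocol, field, value = gen_random()
# e.g. protocol = scapy's IP class, field = 'ttl', value = 64 (illustrative output)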
Example #3
def network_arm(inputs):
    with tf.variable_scope("conv1"):
        o_conv1 = layer(inputs, 16, [5, 5])
    with tf.variable_scope("res1"):
        o_res1 = residual(o_conv1, 16, 1)
    with tf.variable_scope("res2"):
        o_res2 = residual(o_res1, 16, 1)
    with tf.variable_scope("res3"):
        o_res3 = residual(o_res2, 32, 2)
    with tf.variable_scope("res4"):
        o_res4 = residual(o_res3, 32, 1)
    with tf.variable_scope("res5"):
        o_res5 = residual(o_res4, 32, 1)
    with tf.variable_scope("res6"):
        o_res6 = residual(o_res5, 64, 2)
    with tf.variable_scope("res7"):
        o_res7 = residual(o_res6, 64, 1)
    with tf.variable_scope("res8"):
        o_res8 = residual(o_res7, 64, 1)
    with tf.variable_scope("fc"):
        fc = layer(o_res8, 1, [1, 1])
    with tf.variable_scope("flat"):
        return flatten_multi(fc)
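network_arm relies on layer, residual, and flatten_multi helpers defined elsewhere in its file. A hedged sketch of wiring one arm into a TF1 graph (the 64x64x2 input shape is borrowed from Example #6, not stated here):

inputs = tf.placeholder(tf.float32, shape=[None, 64, 64, 2])
with tf.variable_scope("hand_arm"):
    features = network_arm(inputs)  # flattened per-example feature vector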
Example #4
    def parse(cls, str_protocol, field, value):
        """
        Parses a given value for a given field of a given protocol.

        Raises AssertionError if the protocol is not present.
        """
        parsing_layer = None
        for layer in SUPPORTED_LAYERS:
            if layer.name_matches(str_protocol):
                parsing_layer = layer(None)
                break  # stop at the first matching layer

        assert parsing_layer, "Given protocol %s is not permitted." % str_protocol

        return parsing_layer.parse(field, value)
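A hypothetical invocation (Layer stands in for whatever class owns this classmethod; "IP", "ttl", and "64" are illustrative):

parsed_value = Layer.parse("IP", "ttl", "64")  # raises AssertionError for unknown protocols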
Example #5
def setupImages(chunkSize):  # Setting up all the images
    textureLayers = layer()

    textureLayers.groundLayerDic = loadImage(
        chunkSize, textureLayers.groundLayerDic,
        "data/images/textures/groundLayer/")
    textureLayers.surfaceLayerDic = loadImage(
        chunkSize, textureLayers.surfaceLayerDic,
        "data/images/textures/surfaceLayer/")
    textureLayers.objectLayerDic = loadImage(
        chunkSize, textureLayers.objectLayerDic,
        "data/images/textures/objectLayer/")

    return textureLayers
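A hypothetical call; the layer class and loadImage helper are assumed to come from the same project:

textures = setupImages(32)  # 32 = chunk size in pixels (illustrative)
ground_tiles = textures.groundLayerDic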
Example #6
    return tf.reshape(out, [-1, seq_len, out_width, out_width, out_features])


# In[5]:

print("Creating computation graph", flush=True)

seq_len = 100
batch_size = 2

x_h = tf.placeholder(tf.float32, shape=[None, seq_len, 64, 64, 2])
x_m = tf.placeholder(tf.float32, shape=[None, seq_len, 64, 64, 2])
y = tf.placeholder(tf.float32, shape=[None, 20])

# Convolutional layers, hand
b_conv1_h, w_conv1_h, h_conv1_h, o_conv1_h = layer(x_h, 16, [5, 5])
o_res1_h = res_layer(o_conv1_h, 16, 1)
o_res2_h = res_layer(o_res1_h, 16, 1)
o_res3_h = res_layer(o_res2_h, 32, 2)
o_res4_h = res_layer(o_res3_h, 32, 1)
o_res5_h = res_layer(o_res4_h, 32, 1)
o_res6_h = res_layer(o_res5_h, 64, 2)
o_res7_h = res_layer(o_res6_h, 64, 1)
o_res8_h = res_layer(o_res7_h, 64, 1)
_, _, _, o_h = layer(o_res8_h, 1, [1, 1])
#flat_h = flatten(tf.squeeze(o_h))

flat_h = flatten_multi(o_h)
#b_fc1_h, w_fc1_h, h_fc1_h = dense_multi(flat_h, int(flat_h.get_shape()[2]), 256, 0.1, 0.02)

# Convolutional layers, main
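flatten_multi is used above but not shown. A guess at its contract, consistent with the 5-D [batch, seq, height, width, channels] tensors in this example (an assumption, not the source's code):

def flatten_multi(t):
    # Assumed helper: keep the batch and time axes, flatten the rest.
    shape = t.get_shape().as_list()  # e.g. [None, seq_len, h, w, c]
    flat_dim = 1
    for d in shape[2:]:
        flat_dim *= d
    return tf.reshape(t, [-1, shape[1], flat_dim])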
Example #7
  def buildit(self, build_params, flagy=True):

      self.learning_rate   = build_params["learning_rate"]
      self.n_hidden_enc    = build_params["n_hidden_enc"]
      self.n_hidden_dec    = build_params["n_hidden_dec"]
      self.cost_fun        = build_params["cost_fun"]
      self.activation      = build_params["activation"]
      self.tied_weights    = build_params["tied_weights"]
      self.LR_decay        = build_params["LR_decay"]
      self.begin_mom       = build_params["begin_mom"]
      self.end_mom         = build_params["end_mom"]
      self.mom_thrs        = build_params["mom_thrs"]
      
      print "...building the network"
      
      #allocate symbolic variables for data
      x = T.matrix('x')  
      
      next_layer_input = x
      layers = []
      enc_layers = []
      dec_layers = []
      curr_in = self.img_ht * self.img_wdt
             
      self.params = []
      
      for i in xrange(len(self.n_hidden_enc)):
          if flagy is True:
              print "..Hey! we are creating an encoding layer " + str(i)
           
          curr_out = self.n_hidden_enc[i]  
          enc_layers.append(layer(input = next_layer_input,
                                  n_in = curr_in,
                                  n_out = curr_out,
                                  activation = self.activation[i],
                                  numpy_rng = self.numpy_rng,
                                  W = None,
                                  flagy = flagy,
                                   ))
          self.params.extend(enc_layers[-1].params)                       
          next_layer_input = enc_layers[-1].output
          curr_in = curr_out
           
      self.n_hidden_dec.append(self.img_ht * self.img_wdt)
      for i in xrange(len(self.n_hidden_dec)-1):
          if flagy is True:
              print "...Hey again! we are creating a decoding layer"  +  str(i)
          curr_in  = self.n_hidden_dec[i]
          curr_out = self.n_hidden_dec[i+1]
          dec_layers.append(layer(input = next_layer_input,
                                  n_in = curr_in,
                                  n_out = curr_out,
                                  activation = self.activation[i],
                                  numpy_rng = self.numpy_rng,
                                  W = None if self.tied_weights is False else enc_layers[len(self.n_hidden_enc)-i-1].params[0].T,
                                  flagy = flagy,
                                   ))
          next_layer_input = dec_layers[-1].output
          
          if self.tied_weights is False:
              self.params.extend(dec_layers[-1].params)
          
      z = next_layer_input
      self.z = z
          
      L = - T.sum(x * T.log(z) + (1 - x) * T.log(1 - z), axis=1)
      cce = T.mean(L)
      L2 = T.sum((x - z) ** 2, axis=1)
      rmse = T.sqrt(T.mean(L2))
      
      if self.cost_fun == 'rmse':
          self.cost = rmse
      elif self.cost_fun == 'cce':
          self.cost = cce
      else:
          print "Enter a known cost function"
          
      if flagy is True:
          print "... estimating gradients"
      gradients = []
      for param in self.params:
          gradient = T.grad(self.cost, param)
          gradients.append(gradient)
      velocities = []
      for param in self.params:
          velocity = theano.shared(numpy.zeros(param.get_value(borrow=True).shape, dtype=theano.config.floatX))
          velocities.append(velocity)
       
      epoch = T.scalar()
      #pdb.set_trace();   
      self.mom = ifelse(epoch <= self.mom_thrs,
          self.begin_mom*(1.0 - epoch/self.mom_thrs) + self.end_mom*(epoch/self.mom_thrs),
          self.end_mom) 
      self.eta = theano.shared(numpy.asarray(self.learning_rate, dtype=theano.config.floatX))
      updates = OrderedDict()
      for param, gparam, velocity in zip(self.params, gradients, velocities):
          updates[velocity] = self.mom * velocity - (1. - self.mom) * self.eta * gparam
          updates[param] = param + updates[velocity]
      self.get_mom = theano.function(
          inputs = [epoch],
          outputs = self.mom
      )
    
             
      index = T.lscalar() 
      
      self.train_set_x = load_images ('train')
      self.test_set_x  = load_images ('test')
      self.valid_set_x = load_images ('valid')
      
      self.train_ae = theano.function(
          inputs = [index,epoch],
          outputs = self.cost,
          updates=updates,
          givens={
              x: self.train_set_x[index * self.batch_size: (index + 1) * self.batch_size]  # slice out one minibatch
          }
      )
     
      self.test_ae = theano.function(
          inputs = [index],
          outputs = self.cost,
          givens={
              x: self.test_set_x[index * self.batch_size: (index + 1) * self.batch_size]  # slice out one minibatch
          }
      )
      
      self.recon_op = theano.function(
          inputs = [index],
          outputs = self.z,
          givens = {
              x: self.train_set_x[index * self.batch_size: (index + 1) * self.batch_size]
          }
      )       
 
      self.decay_learning_rate = theano.function(
          inputs=[],  # just updates the learning rate
          updates={self.eta: self.eta - self.eta * self.LR_decay}
      )
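buildit reads ten keys from build_params. A hypothetical dictionary and call (all values illustrative; net must already carry img_ht, img_wdt, numpy_rng, and batch_size):

build_params = {
    "learning_rate": 0.01,
    "n_hidden_enc": [500, 200],          # encoder widths
    "n_hidden_dec": [200, 500],          # decoder widths (output size is appended inside)
    "cost_fun": "rmse",                  # or "cce"
    "activation": [T.nnet.sigmoid] * 3,  # indexed per layer
    "tied_weights": True,
    "LR_decay": 0.005,
    "begin_mom": 0.5,
    "end_mom": 0.9,
    "mom_thrs": 50,
}
net.buildit(build_params)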
Example #8
def __call__(self, input_):
    result = input_
    for layer in self.layers:
        result = layer(result)
    return result
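The method folds the stored callables left to right: model(x) == layer_n(...layer_1(x)...). A self-contained toy using the same __call__ (the Stack name is hypothetical):

class Stack(object):
    def __init__(self, layers):
        self.layers = layers

    def __call__(self, input_):
        result = input_
        for layer in self.layers:
            result = layer(result)
        return result

double_after_inc = Stack([lambda v: v + 1, lambda v: v * 2])
print(double_after_inc(3))  # (3 + 1) * 2 == 8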
Example #9
import deal, transfer, layers

path = r'C:\Users\mimota\OneDrive\python\机器学习\lihongyi_hw_1\train.csv'
matrix = deal.deal(path)
x_9, y_10 = transfer.transfer(matrix)
layers.layer(x_9, y_10)

Example #10
def _forward(layer, train_data, weight, bias):
    layer[0] = layers.layer(train_data, weight[0], bias[0])
    for la in range(1, len(weight)):
        layer[la] = layers.layer(layer[la - 1]['out'], weight[la], bias[la])
    return layer
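A hypothetical driver. _forward only requires that layers.layer(x, W, b) return a dict with an 'out' key, so a stand-in is defined here (an assumption; the real helper is not shown):

import numpy as np

class layers(object):
    # Stand-in for the real layers module used above.
    @staticmethod
    def layer(x, W, b):
        out = 1.0 / (1.0 + np.exp(-(x.dot(W) + b)))  # sigmoid dense layer
        return {'out': out}

train_data = np.random.rand(4, 3)                      # 4 samples, 3 features (illustrative)
weight = [np.random.rand(3, 5), np.random.rand(5, 2)]  # one weight matrix per layer
bias = [np.zeros(5), np.zeros(2)]
net = _forward({}, train_data, weight, bias)           # {} works: keys 0..n-1 are assigned in order
print(net[len(weight) - 1]['out'].shape)               # (4, 2): final layer activations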