Example #1
    def __init__(self, sequence_len, batch_size, vocab_size, embedding_size,
                 filter_sizes, num_filters, visible_size, hidden_size,
                 dropout=1.0, l2_reg=0.0, learning_rate=1e-2, params=None,
                 embeddings=None, loss='svm', trainable=True,
                 score_type='nn_output'):
        core.__init__(self, sequence_len, batch_size, vocab_size,
                      embedding_size, filter_sizes, num_filters, visible_size,
                      hidden_size, dropout, l2_reg, params, learning_rate,
                      embeddings, loss, trainable, score_type)

        self.model_type = "Gen"
        self.reward  = tf.placeholder(tf.float32, shape=[None], name='reward')
        self.neg_index = tf.placeholder(tf.int32, shape=[None], name='neg_index')

        self.gan_score = -tf.abs(self.neg_score - self.pos_score)
        #self.gan_score = self.neg_score - self.pos_score

        self.batch_scores = tf.nn.softmax(self.gan_score)
        self.prob = tf.gather(self.batch_scores, self.neg_index)
        self.gan_loss = -tf.reduce_mean(tf.log(self.prob) * self.reward) + l2_reg * self.l2_loss
        #self.gan_loss =  -tf.reduce_sum(tf.log(tf.clip_by_value(self.prob,1e-12,tf.reduce_max(self.prob))) *self.reward) 
        
        self.global_step = tf.Variable(0, name="global_step", trainable=False)
        #optimizer = tf.train.AdamOptimizer(self.learning_rate)
        #grads_and_vars = optimizer.compute_gradients(self.gan_loss)
        #self.gan_updates = optimizer.apply_gradients(grads_and_vars, global_step=self.global_step)
        optimizer = tf.train.GradientDescentOptimizer(self.learning_rate)
        self.gan_updates = optimizer.minimize(self.gan_loss, global_step=self.global_step)



        # minimize attention
        self.gans = -tf.reduce_mean(self.gan_score)
        self.dns_score = self.neg_score
        self.positive = tf.reduce_mean(self.pos_score)
        self.negative = tf.reduce_mean(self.neg_score)
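
For orientation, a minimal NumPy sketch of the generator loss built above, assuming the reward placeholder is fed with per-sample rewards from the discriminator (see Example #4) aligned with neg_index; all numbers are made up and the l2 term is omitted:

import numpy as np

pos_score = np.array([0.9, 0.8, 0.7])   # hypothetical scores of the positive pairs
neg_score = np.array([0.5, 0.6, 0.9])   # hypothetical scores of the candidate negatives
neg_index = np.array([0, 2])            # indices of the negatives actually sampled
reward = np.array([0.4, 1.2])           # hypothetical rewards from the discriminator

gan_score = -np.abs(neg_score - pos_score)                  # -|neg - pos|, as above
batch_scores = np.exp(gan_score) / np.exp(gan_score).sum()  # softmax over the batch
prob = batch_scores[neg_index]                              # probability of each sampled negative
gan_loss = -np.mean(np.log(prob) * reward)                  # REINFORCE-style loss, l2 term omitted
print(gan_loss)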
Example #2
 def __init__(self, _dataFilePath, _templateFilePath):
     core.__init__(self, _dataFilePath, _templateFilePath)        
     self.memorizedData = {}
     self.dataFilePath = _dataFilePath
     self.templateFilePath = _templateFilePath
     self.shellCommand = self.properties.get('shellCommand') or None
     if not self.shellCommand:
         self.error('Parameter shellCommand is REQUIRED in the properties file !!!')
         raise ValueError('Parameter shellCommand is required in the properties file')
Example #3
 def __init__(self, _dataFilePath, _templateFilePath):
     core.__init__(self, _dataFilePath, _templateFilePath)
     self.min_random = properties.getInt('dummy.sleep.min', 0)
     self.max_random = properties.getInt('dummy.sleep.max', 0)
     if self.min_random > self.max_random:
         self.error(
             'Parameters dummy.sleep.min [%d] and dummy.sleep.max [%d] are not consistent!'
             % (self.min_random, self.max_random))
         raise ValueError('dummy.sleep.min must not exceed dummy.sleep.max')
     self.sleep = self.min_random > 0 and self.max_random > 0
Example #4
    def __init__(self,
                 sequence_len,
                 batch_size,
                 vocab_size,
                 embedding_size,
                 filter_sizes,
                 num_filters,
                 visible_size,
                 hidden_size,
                 dropout=1.0,
                 l2_reg=0.0,
                 learning_rate=1e-2,
                 params=None,
                 embeddings=None,
                 loss='svm',
                 trainable=True,
                 score_type='nn_output'):
        core.__init__(self, sequence_len, batch_size, vocab_size,
                      embedding_size, filter_sizes, num_filters, visible_size,
                      hidden_size, dropout, l2_reg, params, learning_rate,
                      embeddings, loss, trainable, score_type)
        self.model_type = 'Dis'
        with tf.name_scope('output'):
            if loss == 'svm':
                self.losses = tf.maximum(
                    0.0, 0.05 - (self.pos_score - self.neg_score))
                self.loss = tf.reduce_sum(
                    self.losses) + self.l2_reg * self.l2_loss
                self.reward = 2 * (
                    tf.sigmoid(0.05 - (self.pos_score - self.neg_score)))
                self.correct = tf.equal(0.0, self.losses)
            elif loss == 'log':
                self.losses = tf.log(
                    tf.sigmoid(self.pos_score - self.neg_score))
                self.loss = -tf.reduce_mean(
                    self.losses) + self.l2_reg * self.l2_loss
                self.reward = tf.reshape(
                    tf.log(tf.sigmoid(self.neg_score - self.pos_score) + 0.5),
                    [-1])
                self.correct = tf.greater(0.0, self.losses)

            self.positive = tf.reduce_mean(self.pos_score)
            self.negative = tf.reduce_mean(self.neg_score)
            #self.correct = tf.equal(0.0, self.losses)
            self.accuracy = tf.reduce_mean(tf.cast(self.correct, 'float'),
                                           name='accuracy')

        self.global_step = tf.Variable(0, name='global_step', trainable=False)
        optimizer = tf.train.GradientDescentOptimizer(self.learning_rate)
        #optimizer = tf.train.AdamOptimizer(self.learning_rate)
        self.updates = optimizer.minimize(self.loss,
                                          global_step=self.global_step)
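
As a rough illustration of the 'svm' branch above, here is a NumPy sketch of the hinge loss and of the reward that is handed back to the generator (made-up scores, l2 term omitted):

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

pos_score = np.array([0.9, 0.55, 0.3])   # hypothetical positive-pair scores
neg_score = np.array([0.5, 0.52, 0.7])   # hypothetical negative-pair scores

losses = np.maximum(0.0, 0.05 - (pos_score - neg_score))  # hinge with margin 0.05
loss = losses.sum()                                        # l2 term omitted
reward = 2 * sigmoid(0.05 - (pos_score - neg_score))       # reward passed to the generator
accuracy = np.mean(losses == 0.0)                          # fraction of pairs with zero hinge loss
print(loss, reward, accuracy)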
Example #5
 def __init__(self, _dataFilePath, _templateFilePath):
     core.__init__(self, _dataFilePath, _templateFilePath)
     properties = grinder.getProperties()
     self.min_random = properties.getInt('dummy.sleep.min', 0)
     self.max_random = properties.getInt('dummy.sleep.max', 0)
     if self.min_random > self.max_random:
         self.error(
             'Parameters dummy.sleep.min [%d] and dummy.sleep.max [%d] are not consistent!'
             % (self.min_random, self.max_random))
         raise ValueError('dummy.sleep.min must not exceed dummy.sleep.max')
     self.sleep = self.min_random > 0 and self.max_random > 0
     self.displayReadResponse = properties.getBoolean(
         'displayReadResponse', False)
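
The dummy.sleep.min / dummy.sleep.max settings above presumably bound a random pause between requests; a minimal sketch of that pattern, assuming millisecond units and a hypothetical helper name (neither is stated in the source):

import random
import time

def maybe_sleep(min_ms, max_ms):
    # pause for a random duration only when both bounds are positive and consistent
    if 0 < min_ms <= max_ms:
        time.sleep(random.randint(min_ms, max_ms) / 1000.0)

maybe_sleep(10, 50)   # hypothetical values for dummy.sleep.min / dummy.sleep.max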
Example #6
    def __init__(self, _dataFilePath, _templateFilePath):
        core.__init__(self, _dataFilePath, _templateFilePath)

        self.lastStmt = 'OTHER'
        self.db_type = None
        self.connections = {}
        str_conn = self.properties.get('db_connection') or None
        logger.info('db_connection=%s' % (str_conn))
        self.connection = None
        if str_conn:
            self.connection = self.getConnection(str_conn)
        if self.connection:
            self.connections[str_conn] = self.connection
            self.cursor = self.connection.cursor()
        self.alias = None
        self.dictBind = {}
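
The constructor above caches one connection per connection string; a self-contained sketch of that caching pattern, with sqlite3 standing in for whatever driver getConnection() actually wraps (the helper below is hypothetical):

import sqlite3

connections = {}

def get_connection(conn_str):
    # reuse an existing connection for the same connection string
    if conn_str not in connections:
        connections[conn_str] = sqlite3.connect(conn_str)
    return connections[conn_str]

conn = get_connection(':memory:')
cursor = conn.cursor()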
Example #7
def get_file(path):
    """
    Return a file path that does not collide with an existing file.

    :param path: str, path to the file (without a numeric suffix)
    :return: str
    """
    i = 1
    while os.path.isfile(path):
        if i == 1:
            path += '.1'
        else:
            # strip the previous numeric suffix before appending the next one
            path = path.rsplit('.', 1)[0] + '.' + str(i)
        i += 1
    return path
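
# Usage note: get_file('run.log') returns 'run.log' when no such file exists yet,
# otherwise 'run.log.1', 'run.log.2', and so on (hypothetical names); the function
# only probes the filesystem, it does not create the file.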


if __name__ == '__main__':
    core.__init__()  # module initialization
    str_time = time.strftime('%b %Y %H:%M:%S', time.localtime())
    profile = cProfile.Profile()
    if core.config['MAIN']['cgitb'] == 'yes':  # enable error reporting
        if core.config['MAIN']['tolog'] == 'yes':  # write it to a log file
            cgitb.enable(0, get_file(os.path.join(core.PATH_CGITB, str_time
                                                  + 'log')))
        else:
            cgitb.enable()
    if core.config['MAIN']['profile'] == 'yes':  # enable profiling
        profile.enable()
    header, content, cookies = core.run(cgi.FieldStorage())  # get the data
    # print the header
    for line in header:
        print(line)
    print(get_all(cookies))  # print the cookies (part of the header)
Example #8
 def __init__(self, _dataFilePath, _templateFilePath):
     core.__init__(self, _dataFilePath, _templateFilePath)