Example #1
import datetime

import numpy as np
import pandas as pd
import tensorflow as tf
import keras
from keras import backend as K

# Builds FGSM-style sign-of-gradient directions for every window in data_x_win.
# Relies on the module-level globals df_YY_actual (true targets), Y and header,
# and requires graph (non-eager) mode, e.g. tf.compat.v1.disable_eager_execution()
# under TF 2.x. The perturbation argument is unused here.
def GetGradient(data_x_win, model, perturbation):
    y_pred = model.output
    y_true = K.variable(np.array(df_YY_actual.iloc[0]))
    loss = keras.losses.mean_squared_error(y_true, y_pred)
    # d(loss)/d(input), keeping only its sign
    grads = K.gradients(loss, model.input)[0]
    x_adv = K.sign(grads)
    sess = K.get_session()
    sess.run(tf.compat.v1.global_variables_initializer())
    x_adv_0 = x_adv[0]

    adv = []
    if len(Y) != len(data_x_win):
        print("WARNING: unequal length of X and Y")
    for i in range(len(data_x_win)):
        # Evaluate the gradient sign for the i-th input window
        adv_i = sess.run(x_adv_0,
                         feed_dict={model.input: [data_x_win[i]],
                                    y_true: np.array(df_YY_actual.iloc[i])})
        if i % 1000 == 0:
            print(i)
            print(datetime.datetime.now().isoformat())

        df_grd_i = pd.DataFrame(adv_i, columns=header)
        adv.append(np.array(df_grd_i))

    return adv
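A hedged usage sketch (the epsilon value is illustrative; data_x_win, model and the module-level globals must already exist): the returned sign matrices can be applied as an FGSM-style perturbation of the inputs.

epsilon = 0.01  # hypothetical perturbation size
grad_signs = GetGradient(data_x_win, model, perturbation=epsilon)
x_adv = [x + epsilon * g for x, g in zip(data_x_win, grad_signs)]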
Example #2
from tensorflow.keras import backend as keras_backend  # assumed source of the alias

def _signed_sqrt(x):
    '''Calculate element-wise signed square-root.

    Args:
        x: input tensor.

    Returns:
        Element-wise signed square-root tensor.
    '''
    return keras_backend.sign(x) * keras_backend.sqrt(keras_backend.abs(x) + 1e-9)
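A hedged usage sketch (the layer name and feature size below are illustrative, not from the source project): such a signed square root is commonly wrapped in a Lambda layer to normalise pooled features element-wise.

from tensorflow.keras import layers

# Illustrative only: apply the signed square root as an element-wise
# normalisation step on a pooled feature vector.
features = layers.Input(shape=(512,), name="pooled_features")  # hypothetical shape
normalized = layers.Lambda(_signed_sqrt, name="signed_sqrt")(features)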
Example #3
    # Pinball (quantile) loss defined inside an enclosing factory function,
    # where `quantile` is in scope and is captured via the default argument.
    def loss(y_true, y_pred, quantile=quantile):
        from tensorflow.python.keras import backend as K
        e = y_pred - y_true
        Ie = (K.sign(e) + 1) / 2  # 1 where e > 0, 0 where e < 0 (0.5 at e == 0)
        return K.mean(e * (Ie - quantile), axis=-1)
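A minimal sketch of how such a closure is typically produced and passed to Keras; the factory name quantile_loss, the quantile value and the toy model are assumptions, not from the source project.

import tensorflow as tf
from tensorflow.keras import backend as K

def quantile_loss(quantile):
    # Returns a Keras-compatible pinball loss for the given quantile.
    def loss(y_true, y_pred, quantile=quantile):
        e = y_pred - y_true
        Ie = (K.sign(e) + 1) / 2
        return K.mean(e * (Ie - quantile), axis=-1)
    return loss

model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(8,))])
model.compile(optimizer="adam", loss=quantile_loss(0.9))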
Example #4
from tensorflow.keras import backend as Backend  # assumed source of the alias

def _signed_sqrt(x):
    # Element-wise signed square root; 1e-9 keeps the sqrt gradient finite at 0.
    sign = Backend.sign(x)
    sqrt = Backend.sqrt(Backend.abs(x) + 1e-9)
    return sign * sqrt
Example #5
    # Layer method: scales noise element-wise as sign(x) * sqrt(|x|), the
    # factorised-noise transform used in NoisyNet-style layers.
    def scale_noise(self, noise):
        return K.sign(noise) * K.sqrt(K.abs(noise))
Example #6
def signed_sqrt(x):
    # Element-wise signed square root; sign, sqrt and abs are assumed to be
    # imported from the Keras backend (e.g. from keras.backend import sign, sqrt, abs).
    return sign(x) * sqrt(abs(x) + 1e-9)
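A quick illustrative check (values made up) of the element-wise behaviour and of why the 1e-9 term matters for gradients at zero:

import tensorflow as tf

x = tf.constant([-4.0, 0.0, 9.0])
with tf.GradientTape() as tape:
    tape.watch(x)
    y = tf.sign(x) * tf.sqrt(tf.abs(x) + 1e-9)
print(y.numpy())                    # approximately [-2., 0., 3.]
print(tape.gradient(y, x).numpy())  # finite everywhere; dropping the epsilon
                                    # makes sqrt'(0) infinite and the gradient NaN at 0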