Example #1
import tensorflow as tf
layers = tf.keras.layers
from tensorflow.contrib import autograph

import numpy as np

tf.enable_eager_execution()


def square_if_positive(x):
    if x > 0:
        x = x * x
    else:
        x = 0.0
    return x


print(autograph.to_code(square_if_positive))

tf_square_if_positive = autograph.to_graph(square_if_positive)

with tf.Graph().as_default():
    # The result works like a regular op: takes tensors in, returns tensors.
    # You can inspect the graph using tf.get_default_graph().as_graph_def()
    g_out1 = tf_square_if_positive(tf.constant(9.0))
    g_out2 = tf_square_if_positive(tf.constant(-9.0))
    with tf.Session() as sess:
        print('Graph results: %2.2f, %2.2f\n' %
              (sess.run(g_out1), sess.run(g_out2)))
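Note: tf.contrib was removed in TensorFlow 2.x. A minimal sketch of the same idea on TF 2.x (an assumed port, not part of the original example) would use tf.autograph.to_code and tf.function instead:

import tensorflow as tf  # assumption: TensorFlow 2.x, where eager execution is the default


def square_if_positive(x):
    if x > 0:
        x = x * x
    else:
        x = 0.0
    return x


# AutoGraph now lives under tf.autograph; tf.function applies it automatically.
print(tf.autograph.to_code(square_if_positive))

tf_square_if_positive = tf.function(square_if_positive)
print('Graph results: %2.2f, %2.2f' % (tf_square_if_positive(tf.constant(9.0)),
                                       tf_square_if_positive(tf.constant(-9.0))))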
Example #2
from __future__ import division, print_function, absolute_import

import tensorflow as tf
import xgb_model_zzr2
import testvars2

layers = tf.keras.layers
from tensorflow.contrib import autograph

print(autograph.to_code(testvars2.XGBprocess))
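Printing the generated source is mainly a sanity check; a hypothetical next step (assuming testvars2.XGBprocess takes and returns tensors, which the original snippet does not show) would stage the same function into graph-building code with to_graph:

# Hypothetical follow-up; XGBprocess is project-specific and assumed to take/return tensors.
tf_xgb_process = autograph.to_graph(testvars2.XGBprocess)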
Example #3
File: physics.py  Project: olantwin/zfit
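# NOTE: excerpt from physics.py; it assumes the following are available from
# earlier in the file (not shown here): numpy as np, zfit, ztf (zfit's
# TensorFlow wrapper), and the CrystalBall class with its crystalball_integral
# and crystalball_integral_limits helpers.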
# ((ANY_UPPER, ), )))  # trailing fragment of a statement truncated in this excerpt
# TODO uncomment, dependency: bug in TF (31.1.19) # 25339 that breaks gradient of resource var in cond
# CrystalBall.register_analytic_integral(func=crystalball_integral, limits=crystalball_integral_limits)

if __name__ == '__main__':
    mu = ztf.constant(0)
    sigma = ztf.constant(0.5)
    alpha = ztf.constant(3)
    n = ztf.constant(1)
    # res = crystalball_func(np.random.random(size=100), mu, sigma, alpha, n)
    # int1 = crystalball_integral(limits=zfit.Space(obs='obs1', limits=(-3, 5)),
    #                             params={'mu': mu, "sigma": sigma, "alpha": alpha, "n": n})
    from tensorflow.contrib import autograph
    import matplotlib.pyplot as plt

    # Inspect the AutoGraph-generated source for the analytic integral
    # (the corresponding print call is commented out further down).
    new_code = autograph.to_code(crystalball_integral)
    obs = zfit.Space(obs='obs1', limits=(-3, 1))
    cb1 = CrystalBall(mu, sigma, alpha, n, obs=obs)
    res = cb1.pdf(np.random.random(size=100))
    int1 = cb1.integrate(limits=(-0.01, 2), norm_range=obs)
    # tf.add_check_numerics_ops()

    x = np.linspace(-5, 1, num=1000)
    vals = cb1.pdf(x=x)
    y = zfit.run(vals)[0]
    plt.plot(x, y)
    plt.show()

    # print(new_code)
    print(zfit.run(res))
    print(zfit.run(int1))
Example #4
import tensorflow as tf
from tensorflow.contrib import autograph

pre_op_idx = -1
i = 0


def _path2dag(path, opt_ind, end_ind, cd_length, num_cells, dag, pre_op_idx,
              i):
    dag = []
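    # AutoGraph directive: declares the element dtype so this Python list can be
    # staged as a typed tensor list when the function is converted.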
    autograph.set_element_type(dag, tf.int32)
    for x in range(5):
        dag.append(x)
    # logger.info(path)
    # logger.info(np.reshape(dag, (self.num_cells, self.cd_length)))
    return dag


print(autograph.to_code(_path2dag))
'''
    for op in path:
        start_idx = i*cd_length
        if op == 0:
            dag[start_idx] = 2
        else:
            dag[start_idx+opt_ind] = op
            dag[start_idx+opt_ind-num_cells+pre_op_idx+1] = 1
            if pre_op_idx != -1:
                dag[(pre_op_idx+1)*cd_length-1] = 0
            dag[start_idx+end_ind] = 1
            pre_op_idx = i
        i += 1
'''
##################################fail#################################################


from __future__ import division, print_function, absolute_import

import tensorflow as tf
from tensorflow.contrib import autograph
import xgb_model_zzr

layers = tf.keras.layers

# Note: xgb_tree is not defined in this snippet; it has to be defined or
# imported (presumably from xgb_model_zzr) before to_code can be called.
print(autograph.to_code(xgb_tree))
Example #6
import tensorflow as tf
from tensorflow.contrib import autograph

import numpy as np
import matplotlib.pyplot as plt

tf.enable_eager_execution()


def square_if_positive(x):
    if x > 0:
        x = x * x
    else:
        x = 0.0
    return x


print(autograph.to_code(square_if_positive))

print('Eager results: %2.2f, %2.2f' % (square_if_positive(
    tf.constant(9.0)), square_if_positive(tf.constant(-9.0))))

tf_square_if_positive = autograph.to_graph(square_if_positive)

with tf.Graph().as_default():
    # The result works like a regular op: takes tensors in, returns tensors.
    # You can inspect the graph using tf.get_default_graph().as_graph_def()
    g_out1 = tf_square_if_positive(tf.constant(9.0))
    g_out2 = tf_square_if_positive(tf.constant(-9.0))

    with tf.Session() as sess:
        print('Graph results: %2.2f, %2.2f\n' %
              (sess.run(g_out1), sess.run(g_out2)))