Example 1
def DetectInit():
    global sess, model, mc

    detect_net = 'squeezeDet'
    checkpoint = '/home/ubuntu/catkin_ws/src/robo_perception/scripts/weights/model.ckpt-99999'


    assert detect_net == 'squeezeDet' or detect_net == 'squeezeDet+', \
        'Selected neural net architecture not supported'

    # Build the model ops in the default graph; the Session created below also
    # uses the default graph.
    # Load model
    if detect_net == 'squeezeDet':
        mc = kitti_squeezeDet_config()
        mc.BATCH_SIZE = 1
        # model parameters will be restored from checkpoint
        mc.LOAD_PRETRAINED_MODEL = False
        model = SqueezeDet(mc, '0')
    elif detect_net == 'squeezeDet+':
        mc = kitti_squeezeDetPlus_config()
        mc.BATCH_SIZE = 1
        mc.LOAD_PRETRAINED_MODEL = False
        model = SqueezeDetPlus(mc, '0')

    saver = tf.train.Saver(model.model_params)
    # Use jit xla
    gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.5)
    config = tf.ConfigProto(allow_soft_placement=True, gpu_options=gpu_options)
    config.graph_options.optimizer_options.global_jit_level = tf.OptimizerOptions.ON_1
    sess = tf.Session(config=config)
    saver.restore(sess, checkpoint)
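
A minimal usage sketch for the globals that DetectInit sets up. The model attribute names (image_input, det_boxes, det_probs, det_class) and the preprocessing follow the squeezeDet demo code and are assumptions here, as is the availability of the repo's config and net modules at import time.

# Hypothetical usage -- attribute names and preprocessing follow the squeezeDet
# demo and are assumptions, not guaranteed by this snippet.
import cv2
import numpy as np

DetectInit()

im = cv2.imread('test.png').astype(np.float32)
im = cv2.resize(im, (mc.IMAGE_WIDTH, mc.IMAGE_HEIGHT))
im -= mc.BGR_MEANS  # assumed per-channel mean subtraction, as in the demo

det_boxes, det_probs, det_class = sess.run(
    [model.det_boxes, model.det_probs, model.det_class],
    feed_dict={model.image_input: [im]})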
Example 2
import tensorflow as tf


def load_graph(graph_file):
    """Load a frozen TensorFlow graph (.pb file) from disk."""
    graph = tf.Graph()
    with graph.as_default():
        od_graph_def = tf.GraphDef()
        with tf.gfile.GFile(graph_file, 'rb') as fid:
            serialized_graph = fid.read()
            od_graph_def.ParseFromString(serialized_graph)
            tf.import_graph_def(od_graph_def, name='')
    return graph
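
A brief usage sketch for load_graph, assuming a frozen detection graph whose input tensor is named 'image_tensor:0' and whose box output is 'detection_boxes:0'; both names are assumptions that depend on how the graph was exported.

# Hypothetical usage -- tensor names depend on the exported frozen graph.
import numpy as np

graph = load_graph('frozen_inference_graph.pb')
with tf.Session(graph=graph) as sess:
    image_tensor = graph.get_tensor_by_name('image_tensor:0')
    boxes = graph.get_tensor_by_name('detection_boxes:0')
    dummy = np.zeros((1, 300, 300, 3), dtype=np.uint8)  # placeholder input
    out = sess.run(boxes, feed_dict={image_tensor: dummy})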
Example 3
    def __init__(self):
        # load the frozen classifier graph
        self.model = tf.Graph()

        # create a context manager that makes this model the default one for
        # execution
        with self.model.as_default():
            # initialize the graph definition
            graphDef = tf.GraphDef()

            # load the graph from disk
            with tf.gfile.GFile("/home/student/Desktop/CarND-Capstone/ros/src/tl_detector/light_classification/graph_optimized.pb", "rb") as f:
                serializedGraph = f.read()
                graphDef.ParseFromString(serializedGraph)
                tf.import_graph_def(graphDef, name="")

            # create a session to perform inference
            self.sess = tf.Session(graph=self.model)
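
One way to avoid the hard-coded absolute path above is to resolve the frozen graph relative to the module itself; a small sketch, assuming graph_optimized.pb sits next to this file.

# Hypothetical alternative to the absolute path -- assumes graph_optimized.pb
# lives alongside this module.
import os

GRAPH_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                          "graph_optimized.pb")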
Example 4
b = tf.variable(rand((
    1,
    layer1_hidden_size,
)))
wx_b = tf.matmul(place_x, w) + b
layer1 = activate(wx_b)  # using 1/(1+x) works better here; sigmoid is actually worse
w = tf.variable(rand((layer1_hidden_size, 3)))
b = tf.variable(rand((1, 3)))
wx_b = tf.matmul(layer1, w) + b
wx_b_flat = tf.reshape(wx_b, (-1, ))
logits = tf.softmax(wx_b_flat)
argmax = tf.Tensor(np.array(tf.argmax(logits)))
accuracy = tf.equal(argmax, place_y)
loss = tf.sparse_cross_entropy(logits, place_y)
learn_rate = tf.const(0.01)
train_op = tf.Optimizer(learn_rate).minimize(loss)
g = tf.Graph([train_op, loss, accuracy])
g.desc()
for epoch in range(10):
    print(epoch, '=' * 20)
    for batch_x, batch_y in zip(train_x, train_y):
        while 1:
            # jumping back and forth between batches doesn't learn well; one
            # batch has to be mastered before moving on to the next
            _, acc, lo = g.run([train_op, accuracy, loss],
                               feed_dict={
                                   place_x: batch_x,
                                   place_y: batch_y,
                               })
            print(acc, lo)
            if acc == 1:
                break
print('train over', '-' * 10)
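
The snippet above targets a homemade tf-like module rather than TensorFlow itself (tf.variable, tf.softmax, tf.Graph([...]) and g.run are not TensorFlow APIs). A rough equivalent of the same two-layer classifier in standard TensorFlow 1.x might look like the sketch below; the input size, hidden size, and dummy batch are assumptions for illustration.

# Sketch of an equivalent two-layer classifier in plain TensorFlow 1.x.
# Dimensions and the dummy batch are assumptions, not taken from the snippet.
import numpy as np
import tensorflow as tf

input_size, hidden_size, num_classes = 4, 16, 3

place_x = tf.placeholder(tf.float32, [None, input_size])
place_y = tf.placeholder(tf.int64, [None])

w1 = tf.Variable(tf.random_normal([input_size, hidden_size]))
b1 = tf.Variable(tf.zeros([hidden_size]))
layer1 = tf.sigmoid(tf.matmul(place_x, w1) + b1)

w2 = tf.Variable(tf.random_normal([hidden_size, num_classes]))
b2 = tf.Variable(tf.zeros([num_classes]))
logits = tf.matmul(layer1, w2) + b2

loss = tf.reduce_mean(
    tf.nn.sparse_softmax_cross_entropy_with_logits(labels=place_y,
                                                   logits=logits))
accuracy = tf.reduce_mean(
    tf.cast(tf.equal(tf.argmax(logits, axis=1), place_y), tf.float32))
train_op = tf.train.GradientDescentOptimizer(0.01).minimize(loss)

batch_x = np.random.rand(8, input_size).astype(np.float32)  # dummy data
batch_y = np.random.randint(0, num_classes, size=8)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for epoch in range(10):
        _, acc, lo = sess.run([train_op, accuracy, loss],
                              feed_dict={place_x: batch_x, place_y: batch_y})
        print(epoch, acc, lo)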
Example 5
import tensorflow as tf

g = tf.Graph()

with g.as_default():
    c = tf.constant(5.0)
    assert c.graph is g

with tf.Graph().as_default() as g:
    c = tf.constant(5.0)
    assert c.graph is g

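For completeness: operations created outside any as_default() block are placed in the process-wide default graph, which can be retrieved explicitly.

# Ops built outside a `with graph.as_default():` block land in the global
# default graph.
d = tf.constant(1.0)
assert d.graph is tf.get_default_graph()
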
Example 6
import os
import sys

import tensorflow as tf

# label_map_util ships with the TensorFlow Object Detection API; this assumes
# the object_detection package is on the Python path.
from object_detection.utils import label_map_util

######### Set model here ############
MODEL_NAME = 'modelo_congelado'
# By default models are stored in data/models/
MODEL_PATH = os.path.join(os.path.dirname(sys.path[0]), 'data', 'models',
                          MODEL_NAME)
# Path to frozen detection graph. This is the actual model that is used for the object detection.
PATH_TO_CKPT = MODEL_PATH + '/frozen_inference_graph.pb'
######### Set the label map file here ###########
LABEL_NAME = 'mscoco_label_map.pbtxt'
# By default label maps are stored in data/labels/
PATH_TO_LABELS = os.path.join(os.path.dirname(sys.path[0]), 'data', 'labels',
                              LABEL_NAME)
######### Set the number of classes here #########
NUM_CLASSES = 1

detection_graph = tf.Graph()
with detection_graph.as_default():
    od_graph_def = tf.GraphDef()
    with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:
        serialized_graph = fid.read()
        od_graph_def.ParseFromString(serialized_graph)
        tf.import_graph_def(od_graph_def, name='')

## Loading the label map
# Label maps map indices to category names, so that when our convolutional
# network predicts `5`, we know that this corresponds to `airplane`. Here we
# use internal utility functions, but anything that returns a dictionary
# mapping integers to appropriate string labels would be fine.
label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
categories = label_map_util.convert_label_map_to_categories(
    label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
category_index = label_map_util.create_category_index(categories)
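
A sketch of how detection_graph and category_index are typically used together for inference. The tensor names follow the TensorFlow Object Detection API's frozen-graph convention and are assumptions about what the 'modelo_congelado' export actually contains; the dummy image is a placeholder.

# Hypothetical inference sketch -- tensor names assume a standard Object
# Detection API frozen graph.
import numpy as np

with tf.Session(graph=detection_graph) as sess:
    image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
    det_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
    det_scores = detection_graph.get_tensor_by_name('detection_scores:0')
    det_classes = detection_graph.get_tensor_by_name('detection_classes:0')

    image = np.zeros((1, 300, 300, 3), dtype=np.uint8)  # dummy input frame
    boxes, scores, classes = sess.run(
        [det_boxes, det_scores, det_classes],
        feed_dict={image_tensor: image})

    for score, cls in zip(scores[0], classes[0]):
        if score > 0.5:
            print(category_index[int(cls)]['name'], score)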