Example #1
def next_batch_test2():
    # Only constructs the DataSet for the 'train1' split; meant to be timed via the profile.run() calls below
    train = DataSet('UCF', 'train1', 25)

# profile.run("next_batch_test2()")
# profile.run("next_batch_test()")


# for i in range(480):
#     res = train.next_batch(500)
#     print(len(res),str(train.index_in_epoch_video), train.index_frame)
#
# for i in range(10):
#     print(random.randint(0, 1))
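The commented profile.run(...) calls above suggest how these stubs were meant to be timed. A minimal sketch using the standard-library cProfile module, assuming DataSet is importable from the scripts package used in Example #4, could look like this:

import cProfile
import pstats

from scripts.DataSet import DataSet  # assumed import path for the DataSet class


def profile_next_batch():
    # Same workload as next_batch_test(): build the split and draw one large batch
    train = DataSet('UCF', 'train1', 25)
    train.next_batch(500)


# Dump profiling stats to a file and print the 10 most expensive calls by cumulative time
cProfile.run('profile_next_batch()', 'next_batch.prof')
pstats.Stats('next_batch.prof').sort_stats('cumulative').print_stats(10)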
Example #2
def next_batch_test():
    # Construct the 'train1' split and draw a single 500-sample batch
    train = DataSet('UCF', 'train1', 25)
    images, labels = train.next_batch(500)
Example #3
# NOTE: imports below are assumed for this snippet; the DataSet import path
# follows the scripts package used in Example #4.
import time
import numpy
import matplotlib.pyplot as plt
from scripts.DataSet import DataSet

# test.append(tmp2)
# tmp = numpy.array(test)
# print(tmp.shape)


arr = []
for i in range(2):
    arr.append(numpy.arange(10))
#
# for i in range(10):
#     numpy.random.shuffle(arr[0])
#     numpy.random.shuffle(arr[1])
#     print(arr)


# Time a two-sample batch fetch, inspect its shapes, and display the first frame
train = DataSet('UCF', 'train1', 25)
begin_time = time.time()
images, labels = train.next_batch(2)
print(time.time() - begin_time, 's')
print(images.shape)
print(labels.shape)
print(labels[0])
plt.imshow(images[0])
plt.show()
def next_batch_test():
    train = DataSet('UCF', 'train1', 25)
    images, labels = train.next_batch(500)

def next_batch_test2():
    train = DataSet('UCF', 'train1', 25)
Example #4
import sys
import time
import tensorflow as tf
from scripts import DataSet
import numpy
import urllib.request
# import os
# os.environ["CUDA_VISIBLE_DEVICES"] = '0'
sys.path.append("D:/graduation_project/workspace/models/research/slim")
from scripts import my_vgg_16 as vgg

slim = tf.contrib.slim

checkpoint_path = 'D:/graduation_project/checkpoints/vgg_16.ckpt'
# Output directory prefix; per-range feature files are written as <pre>_<post>.txt under it
txtName = 'D:/graduation_project/workspace/dataset/UCF101_train_test_splits/train1_feature_files/vgg16_fc7/'

train = DataSet.DataSet('UCF', 'train3', 25, label_type='notonehot')
total_frame = train.total_frame
myinput = tf.placeholder(tf.float32, [None, 224, 224, 3])
logits, _ = vgg.vgg_16(myinput, num_classes=1000, is_training=False, jud='fc7')
init = slim.assign_from_checkpoint_fn(
    checkpoint_path, slim.get_variables_to_restore(include=["vgg_16"]))

beginTime = time.time()
pre = 0        # lower bound of the frame range named in the current output file
post = 9999    # upper bound of the frame range named in the current output file
status = True  # True when the output filename needs to be (re)built
with tf.Session() as sess:
    init(sess)
    for k in range(total_frame):
        if status is True:
            nowTxtName = txtName + str(pre) + '_' + str(post) + '.txt'
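        # --- Hypothetical continuation (not in the original snippet) ---
        # Assuming train.next_batch(1) yields one preprocessed frame, push it
        # through the fc7 head and append the activation to the current range
        # file, rolling over to a new <pre>_<post>.txt after each range.
        frame, _ = train.next_batch(1)
        fc7 = sess.run(logits, feed_dict={myinput: frame})
        with open(nowTxtName, 'a') as f:
            numpy.savetxt(f, fc7.reshape(1, -1))
        if k == post:
            pre, post = post + 1, post + 10000
            status = True
        else:
            status = False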
Example #5
probabilities = tf.nn.softmax(logits)
print(slim.get_model_variables())
# Load the network weights from the checkpoint
init_fn = slim.assign_from_checkpoint_fn(checkpoints_dir, slim.get_model_variables('vgg_16'))

show_detail = False
right_count = 0

with tf.Session() as sess:
    # Load the weights
    begin_time = time.time()
    init_fn(sess)
    print('Loading: %.2fs' % (time.time()-begin_time))

    test = DataSet.Test('UCF101', 'test1', 10)
    begin_time = time.time()
    for k in range(len(file_list)):
        file = file_list[k]
        ground_true = (file.split('.')[0]).split('_')[1]
        input_batch = test.test_an_video(source_folder + file)
        result = probabilities.eval(feed_dict={input: input_batch})

        if show_detail:
            print('=====================  ' + str(k) + '  ==================')
        seq = []
        for x in range(result.shape[0]):
            prob = result[x, 0:]
            sorted_inds = [i[0] for i in sorted(enumerate(-prob),
                                                key=lambda x: x[1])]
            if show_detail:
Example #6
    def makeMatrix(self, ds, filterMin, filterMax, reorder):
        # getting data from FileLoader and creating the right format

        dataSetSort = DataSetSort()
        if reorder == 0:
            nodes = ds.getDoubleList(filterMin, filterMax, True)
        elif reorder == 1:
            dsCopy = DataSet(ds.getNodes())
            dsCopy.makeUndirectionalAdd()
            nodes = dsCopy.getDoubleList(filterMin, filterMax, True)
        elif reorder == 2:
            dataSetSort.DesConnectionSort(ds)
            nodes = ds.getDoubleList(filterMin, filterMax, True)
        elif reorder == 3:
            dataSetSort.DesStrengthSort(ds)
            nodes = ds.getDoubleList(filterMin, filterMax, True)
        elif reorder == 4:
            nodes = ds.distanceMatrix(True)
        elif reorder == 5:
            dataSetSort.robinsonSort(ds)
            nodes = ds.getDoubleList(filterMin, filterMax, True)
        else:
            # Fallback for unrecognized reorder values so that nodes is always bound
            nodes = ds.getDoubleList(filterMin, filterMax, True)

        names = ds.getNames()
        yNames = names.copy()
        yNames.reverse()

        df = pd.DataFrame(
            nodes,
            columns=yNames,
            index=names)
        df.index.name = 'X'
        df.columns.name = 'Y'

        # Prepare data.frame in the right format
        df = df.stack().rename("value").reset_index()


        # Making the plot html file
        output_file("matrixPlot.html")

        # Build the color ramp: a run of progressively darker shades.
        # Note: for small i the components exceed 255; assuming wc is webcolors,
        # rgb_to_hex clamps them back to 255, so the first entries are near white.
        colorList = []

        i = 0
        while i < 256:
            color = wc.rgb_to_hex((255-(i-10), 255-(i-20), 255-(i-30)))
            colorList.append(color)
            i = i + 10

        #colorList = [(23, 165, 137), (19, 141, 117), (40, 180, 99), (36, 113, 163)
        #             , (31, 97, 141), (17, 122, 101), (46, 134, 193), (34, 153, 84)]#,(202, 111, 30), (186, 74, 0)]
        #colors = []
        #for i in colorList:
        #    color = wc.rgb_to_hex(i)
        #    colors.append(color)

        colors = colorList

        # This part maps the colors at intervals
        mapper = LinearColorMapper(
            palette=colors, low=df.value.min(), high=df.value.max())

        # Creating the figure
        p = figure(
            plot_width=500,
            plot_height=500,
            x_range=list(df.X.drop_duplicates()),
            y_range=list(df.Y.drop_duplicates()),

            # Adding a toolbar
            #toolbar_location="right",
            #tools="hover,pan,box_zoom,undo,redo,reset,save",
            x_axis_location="above")

        node_hover_tool = HoverTool(tooltips=[("Name X Axis", "@X"), ("Name Y Axis", "@Y"), ("Relation Strength", "@value")])

        # plot.add_tools(node_hover_tool, BoxZoomTool(), ResetTool())
        p.add_tools(node_hover_tool, TapTool(), BoxSelectTool(), BoxZoomTool(), UndoTool(), RedoTool())
        p.toolbar_location = 'right'

        # Create rectangle for heatmap
        p.rect(
            x="X",
            y="Y",
            width=1,
            height=1,
            source=ColumnDataSource(df),
            line_color=None,
            fill_color=transform('value', mapper))

        # Add legend
        color_bar = ColorBar(
            color_mapper=mapper,
            location=(0, 0),
            ticker=BasicTicker(desired_num_ticks=len(colors)))

        # Hide an axis when it has too many categories to label legibly
        if len(list(df.X.drop_duplicates())) > p.plot_width / 10:
            p.xaxis.visible = False
        if len(list(df.Y.drop_duplicates())) > p.plot_height / 10:
            p.yaxis.visible = False

        p.xaxis.major_label_orientation = 'vertical'
        p.add_layout(color_bar, 'right')
        return p
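For orientation, a hypothetical call site could hand the returned figure straight to Bokeh's show(); the visualizer object, the ds data set, and the filter bounds below are placeholders rather than this project's actual API:

from bokeh.plotting import show

# Hypothetical usage sketch; 'visualizer' and 'ds' stand in for however this
# class and the FileLoader-backed DataSet are actually constructed.
p = visualizer.makeMatrix(ds, filterMin=0.0, filterMax=1.0, reorder=2)
show(p)  # writes and opens matrixPlot.html, as set by output_file() above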
Example #7
import sys
import time
import tensorflow as tf
from scripts import DataSet
import numpy
import urllib.request
# import os
# os.environ["CUDA_VISIBLE_DEVICES"] = '0'
sys.path.append("D:/graduation_project/workspace/models/research/slim")
from scripts import my_vgg_16 as vgg
slim = tf.contrib.slim

checkpoint_path = 'D:/graduation_project/checkpoints/vgg_16.ckpt'
new_model_checkpoint_path = 'D:/graduation_project/workspace/checkpoints'

train = DataSet.DataSet('UCF', 'train1', 25)
input, label = train.next_batch(2)

logits, _ = vgg.vgg_16(input, num_classes=1000, is_training=False, jud='fc7')
init1 = slim.assign_from_checkpoint_fn(
    checkpoint_path, slim.get_variables_to_restore(include=["vgg_16"]))
# init1 = slim.assign_from_checkpoint_fn(new_model_checkpoint_path + '\\model.ckpt-107', slim.get_variables("vgg_16"))
# init2 = slim.assign_from_checkpoint_fn(new_model_checkpoint_path + '\\model.ckpt-108', slim.get_variables("vgg_16"))

with tf.Session() as sess:
    # init1(sess)
    # fc8_biases = slim.get_variables("vgg_16/fc8/biases")
    # fc7_biases = slim.get_variables("vgg_16/fc7/biases")
    # print(fc8_biases)
    # print('fc8 biases pre: ', sess.run(fc8_biases[0:10]))
    # print('fc7 biases pre: ', sess.run(fc7_biases[0:10]))
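    # --- Hypothetical continuation (not part of the original snippet) ---
    # Restore the pretrained weights via init1, then run the fc7 head on the
    # two-sample batch and report the shape of its output.
    init1(sess)
    fc7_out = sess.run(logits)
    print('fc7 output shape:', fc7_out.shape)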