Example #1
File: download.py Project: edupard/csnet
import tensorflow as tf

# checkpoints_dir is assumed to be defined elsewhere in this module,
# e.g. checkpoints_dir = 'checkpoints/'.


def check_and_download_vgg_checkpoint():
    # Download the VGG-16 checkpoint only if the directory is missing.
    if not tf.gfile.Exists(checkpoints_dir):
        tf.gfile.MakeDirs(checkpoints_dir)

        from models.research.slim.datasets import dataset_utils

        url = "http://download.tensorflow.org/models/vgg_16_2016_08_28.tar.gz"
        dataset_utils.download_and_uncompress_tarball(url, checkpoints_dir)
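
For reference, a minimal sketch of what TF-Slim's dataset_utils.download_and_uncompress_tarball does under the hood (illustrative, not the exact implementation): fetch the tarball into the target directory with a progress callback, then extract it.

import os
import sys
import tarfile
from urllib.request import urlretrieve


def download_and_uncompress_tarball_sketch(tarball_url, dataset_dir):
    filename = tarball_url.split('/')[-1]
    filepath = os.path.join(dataset_dir, filename)

    def _progress(count, block_size, total_size):
        # urlretrieve invokes this after every block; print a running percentage.
        sys.stdout.write('\r>> Downloading %s %.1f%%' % (
            filename, 100.0 * count * block_size / total_size))
        sys.stdout.flush()

    filepath, _ = urlretrieve(tarball_url, filepath, _progress)
    tarfile.open(filepath, 'r:gz').extractall(dataset_dir)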
Example #2
import os

import tensorflow as tf

# _parse_args, _get_output_filename, _add_to_tfrecord,
# _clean_up_temporary_files, dataset_utils, _DATA_URL, _NUM_TRAIN_FILES and
# _CLASS_NAMES are defined elsewhere in the module; the listing follows
# TF-Slim's download_and_convert_cifar10.py.


def main():
    """Runs the download and conversion operation."""
    args = _parse_args()
    dataset_dir = args.data_dir

    if not tf.gfile.Exists(dataset_dir):
        tf.gfile.MakeDirs(dataset_dir)

    training_filename = _get_output_filename(dataset_dir, 'train')
    testing_filename = _get_output_filename(dataset_dir, 'test')

    if tf.gfile.Exists(training_filename) and tf.gfile.Exists(
            testing_filename):
        print('Dataset files already exist. Exiting without re-creating them.')
        return

    dataset_utils.download_and_uncompress_tarball(_DATA_URL, dataset_dir)

    # First, process the training data:
    with tf.python_io.TFRecordWriter(training_filename) as tfrecord_writer:
        offset = 0
        for i in range(_NUM_TRAIN_FILES):
            filename = os.path.join(dataset_dir, 'cifar-10-batches-py',
                                    'data_batch_%d' % (i + 1))  # 1-indexed.
            offset = _add_to_tfrecord(filename, tfrecord_writer, offset)

    # Next, process the testing data:
    with tf.python_io.TFRecordWriter(testing_filename) as tfrecord_writer:
        filename = os.path.join(dataset_dir, 'cifar-10-batches-py',
                                'test_batch')
        _add_to_tfrecord(filename, tfrecord_writer)

    # Finally, write the labels file:
    labels_to_class_names = dict(zip(range(len(_CLASS_NAMES)), _CLASS_NAMES))
    dataset_utils.write_label_file(labels_to_class_names, dataset_dir)

    _clean_up_temporary_files(dataset_dir)
    print('\nFinished converting the Cifar10 dataset!')
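
The naming helper is not shown in this listing; in TF-Slim's download_and_convert_cifar10.py it simply builds the TFRecord path for a split, roughly as follows (a sketch, assuming that convention):

def _get_output_filename(dataset_dir, split_name):
    """Returns e.g. '<dataset_dir>/cifar10_train.tfrecord'."""
    return '%s/cifar10_%s.tfrecord' % (dataset_dir, split_name)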
Example #3
import os

import numpy as np
import tensorflow as tf

from datasets import dataset_utils  # import path as in TF-Slim; may differ per project


def make_batch(X, y, start_index, stop_index):
    # The listing starts mid-function, so the name, signature and the two
    # accumulator lists below are reconstructed from how they are used.
    prepared_images = []
    labels = []
    for index in range(start_index, stop_index):
        prepared_images.append(preprocess(X[index]))  # preprocess() is project-defined
        labels.append(y[index])

    X_batch = np.stack(prepared_images)
    y_batch = np.array(labels, dtype=np.int32)

    return X_batch, y_batch
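
Hypothetical usage of the helper above (X_train, y_train and batch_size are illustrative names, not from the original listing):

batch_size = 64
for start in range(0, len(X_train), batch_size):
    X_batch, y_batch = make_batch(X_train, y_train, start,
                                  min(start + batch_size, len(X_train)))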


if not tf.gfile.Exists("models/cnn"):
    tf.gfile.MakeDirs("models/cnn")

if not os.path.exists("models/cnn/inception_v3.ckpt"):
    dataset_utils.download_and_uncompress_tarball(
        "http://download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz",
        "models/cnn")

if not os.path.exists("tmp/faces"):
    os.makedirs("tmp/faces")

if not os.path.exists("tmp/faces/lfw-deepfunneled.tgz"):
    dataset_utils.download_and_uncompress_tarball(
        "http://vis-www.cs.umass.edu/lfw/lfw-deepfunneled.tgz", "tmp/faces")

class_mapping = {}
class_images = {}

# Index the per-person face directories.
for index, directory in enumerate(os.listdir("images/faces/")):
    parts = directory.split(" ")
Example #4
from os.path import isfile, isdir

import tensorflow as tf
from scipy.io import loadmat
from tqdm import tqdm

# dataset_utils comes from TF-Slim; the exact import path may differ per project.
from datasets import dataset_utils

data_dir = 'data/'
url = "http://download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz"
checkpoints_dir = "model_checkpoint/"

nrof_labelled = 1000  # number of labelled examples

if not isdir(data_dir):
    raise Exception("Data directory doesn't exist!")

if not tf.gfile.Exists(checkpoints_dir):
    tf.gfile.MakeDirs(checkpoints_dir)

dataset_utils.download_and_uncompress_tarball(url, checkpoints_dir)


class DLProgress(tqdm):
    """tqdm progress bar driven by urlretrieve's reporthook callback."""
    last_block = 0

    def hook(self, block_num=1, block_size=1, total_size=None):
        self.total = total_size
        # Advance by however many blocks arrived since the last call.
        self.update((block_num - self.last_block) * block_size)
        self.last_block = block_num
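
DLProgress is typically passed to urlretrieve as the reporthook, for example (the target filename here is illustrative):

from urllib.request import urlretrieve

with DLProgress(unit='B', unit_scale=True, miniters=1,
                desc='Inception v3 checkpoint') as pbar:
    urlretrieve(url, checkpoints_dir + 'inception_v3_2016_08_28.tar.gz',
                pbar.hook)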


# Load the training and testing datasets (stored as MATLAB .mat files).
trainset = loadmat(data_dir + 'train_64x64.mat')
testset = loadmat(data_dir + 'test_64x64.mat')
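
loadmat returns a dict mapping MATLAB variable names to NumPy arrays; the exact keys depend on how the files were saved, so printing them is a safe first step:

print([key for key in trainset if not key.startswith('__')])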