Example #1
0
def make_dataflow_demo(env):
    """Build the demo dataflow: cycled MNIST samples mapped to feed dicts.

    NOTE(review): the original comment claimed this yields
    (feed_dict, extra_info) pairs, but only a feed dict is returned —
    presumably the demo driver tolerates a missing extra_info; confirm.
    """
    ensure_load()

    def to_feed_dict(img, label):
        # Add a leading batch axis of 1; the label is intentionally unused.
        return dict(img=img[np.newaxis].astype('float32'))

    # _mnist[1] — presumably the validation split; cycle it forever and
    # map every sample through to_feed_dict.
    dataset = _mnist[1]
    dataset = flow.DictOfArrayDataFlow(dataset)
    dataset = flow.tools.cycle(dataset)
    return flow.tools.ssmap(to_feed_dict, dataset)
Example #2
0
def make_dataflow_inference(env):
    """Build the batched MNIST inference dataflow.

    Cycles the validation split, batches it with a preallocated float32
    image buffer of shape (batch_size, 28, 28, 1), and bounds each epoch
    at ``inference.epoch_size`` batches.
    """
    ensure_load()
    batch_size = get_env('inference.batch_size')
    epoch_size = get_env('inference.epoch_size')

    # _mnist[1] — presumably the validation split; confirm against ensure_load.
    dataset = flow.DictOfArrayDataFlow(_mnist[1])
    dataset = flow.tools.cycle(dataset)
    sample = {'img': np.empty(shape=(batch_size, 28, 28, 1), dtype='float32')}
    dataset = flow.BatchDataFlow(dataset, batch_size, sample_dict=sample)
    return flow.EpochDataFlow(dataset, epoch_size)
Example #3
0
def make_dataflow_demo(env):
    """Build the CIFAR demo dataflow.

    Cycles the validation split and maps each sample to a
    (feed_dict, extra_info) pair: the feed dict carries the image with a
    leading batch axis, the extra dict carries the ground-truth label.
    """
    num_classes = get_env('dataset.nr_classes')
    ensure_load(num_classes)

    def to_feed_and_extra(img, label):
        feed = dict(img=img[np.newaxis].astype('float32'))
        extra = dict(label=label)
        return feed, extra

    # _cifar[1] — presumably the validation split; confirm against ensure_load.
    dataset = flow.DictOfArrayDataFlow(_cifar[1])
    dataset = flow.tools.cycle(dataset)
    return flow.tools.ssmap(to_feed_and_extra, dataset)
def main_demo_infogan(env, func):
    """Sample the InfoGAN latent prior, generate images via *func*, and
    write them as one tiled grid to 'infogan.png'.

    *func* is presumably a compiled inference function taking the feed
    dict as keyword arguments — confirm against the caller.
    """
    network = env.network
    zc = network.zc_distrib.numerical_sample(network.zc_distrib_num_prior)
    dataflow = flow.DictOfArrayDataFlow({'zc': zc.reshape(zc.shape[0], 1, -1)})

    # Collect the first output channel of each generated sample.
    outputs = []
    for feed in tqdm.tqdm(dataflow, total=len(dataflow), **get_tqdm_defaults()):
        outputs.append(func(**feed)['output'][0, :, :, 0])

    grid_desc = get_env('demo.infogan.grid_desc')
    grid = image.image_grid(outputs, grid_desc)
    # Outputs are assumed to lie in [0, 1]; scale to 8-bit for writing.
    image.imwrite('infogan.png', (grid * 255).astype('uint8'))
Example #5
0
def make_dataflow_demo(env):
    """Build the demo dataflow.

    When ``demo.is_reconstruct`` is set, cycle MNIST validation samples
    as image feed dicts; otherwise emit empty feeds (pure generation).

    NOTE(review): the original comment claimed (feed_dict, extra_info)
    pairs, but only a feed dict is produced — confirm the demo driver
    accepts this.
    """
    if not get_env('demo.is_reconstruct', False):
        # Generation-only demo: the network needs no real input samples.
        return flow.EmptyDictDataFlow()

    ensure_load()

    def to_feed_dict(img, label):
        # Add a leading batch axis of 1; the label is intentionally unused.
        return dict(img=img[np.newaxis].astype('float32'))

    # _mnist[1] — presumably the validation split; confirm against ensure_load.
    dataset = flow.DictOfArrayDataFlow(_mnist[1])
    dataset = flow.tools.cycle(dataset)
    return flow.tools.ssmap(to_feed_dict, dataset)
Example #6
0
def make_dataflow_inference(env):
    """Build the batched CIFAR inference dataflow.

    Cycles the validation split, batches it with preallocated buffers
    (float32 images, int32 labels), and bounds each epoch at
    ``inference.epoch_size`` batches.
    """
    num_classes = get_env('dataset.nr_classes')
    ensure_load(num_classes)
    batch_size = get_env('inference.batch_size')
    epoch_size = get_env('inference.epoch_size')

    sample = {
        'img': np.empty(
            shape=(batch_size, _cifar_img_dim, _cifar_img_dim, 3),
            dtype='float32'),
        'label': np.empty(shape=(batch_size, ), dtype='int32'),
    }
    # _cifar[1] — presumably the validation split; confirm against ensure_load.
    dataset = flow.DictOfArrayDataFlow(_cifar[1])
    dataset = flow.tools.cycle(dataset)
    dataset = flow.BatchDataFlow(dataset, batch_size, sample_dict=sample)
    return flow.EpochDataFlow(dataset, epoch_size)