def __init__(self, path):
    # Build the full dataset pipeline and expose the iterator's
    # get_next op and initializer to the rest of the class.
    self.dataset = database.get_dataset(path,
                                        vision=True,
                                        positions=True,
                                        speeds=True,
                                        tactile_map=True)
    self.iterator = self.dataset.make_initializable_iterator()
    self.next = self.iterator.get_next()
    self.initializer = self.iterator.initializer
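
A minimal usage sketch for the constructor above (TF 1.x); the enclosing class name `DataReader` is an assumption, not taken from the original:

# Hypothetical usage; the class name is an assumption.
reader = DataReader("/path/to/records")
with tf.Session() as sess:
    sess.run(reader.initializer)       # initialize the iterator once
    sample = sess.run(reader.next)     # one element as a dict of arrays
    print(sample.keys())               # vision / positions / speeds / tactile_map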
import os
import datetime

import numpy as np
import tensorflow as tf

import database
import parsers
from networks import predictor_maker, mse


parser = parsers.train_parser
args = parser.parse_args()


path = args.input
batch_size = args.batch_size
buffer_size = batch_size * 5
tactile_map_length = np.load(args.input + "/tactile_map/length.npy")


dataset_t0 = database.get_dataset(path, tactile_map=True)
dataset_t0 = dataset_t0.prefetch(5 * batch_size)
dataset_t1 = dataset_t0.skip(1)  # shift by one step: dataset_t1[i] == dataset_t0[i + 1]
dataset = tf.data.Dataset.zip((dataset_t0, dataset_t1))  # pair each sample with its successor
dataset = dataset.batch(batch_size)
dataset = dataset.shuffle(buffer_size=buffer_size)  # shuffles whole batches, so (t, t+1) pairs stay aligned
dataset = dataset.repeat()

iterator = dataset.make_initializable_iterator()
batch_t0, batch_t1 = iterator.get_next()

tactile_map = batch_t0["tactile_map"]
target_tactile_map = batch_t1["tactile_map"]

tactile_map_predictor = predictor_maker(tactile_map_length, tactile_map_length)
out = tactile_map_predictor(tactile_map)
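
The fragment stops after the forward pass. A hedged sketch of the remaining training step, assuming the imported `mse` builds a scalar TensorFlow loss op (the optimizer and step count are illustrative):

# Sketch only: loss, optimizer and loop are assumptions, not the original code.
loss = mse(out, target_tactile_map)
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(iterator.initializer)
    for step in range(1000):
        _, current_loss = sess.run([train_op, loss])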
Example #3
import os
import re

import tensorflow as tf

import database
import parsers
from networks import predictor_maker, mse


parser = parsers.test_parser
args = parser.parse_args()

path = args.input
batch_size = args.batch_size
buffer_size = batch_size * 5
regex = r"joint_t0__joint_t1_[0-9]+_[0-9]+_[0-9]+_[0-9]+_[0-9]+_[0-9]+_nd([0-9]+)"
dirname = os.path.basename(os.path.normpath(args.network_path))
N_DISCRETE = int(re.match(regex, dirname).group(1))
regex = r"sf[0-9]+.?[0-9]*_re[0-9]+_ae[0-9]+_n([0-9]+)_chunk[0-9]+.tfr"
n_records = int(re.match(regex, os.listdir(path + '/positions')[0]).group(1))

dataset_t0 = database.get_dataset(path, positions=True, actions=True, vision=True)
dataset_t0 = dataset_t0.map(database.discretize_dataset(N_DISCRETE))  # bin continuous values into N_DISCRETE classes
dataset_t1 = dataset_t0.skip(1)  # next-timestep targets
dataset = tf.data.Dataset.zip((dataset_t0, dataset_t1))  # pair each sample with its successor
dataset = dataset.batch(batch_size)

iterator = dataset.make_initializable_iterator()
batch_t0, batch_t1 = iterator.get_next()

# Split the 4 joints into separate (batch_size, N_DISCRETE) tensors.
discrete_positions = [tf.squeeze(x, axis=1) for x in tf.split(batch_t0["positions"], 4, axis=1)]
discrete_actions = [tf.squeeze(x, axis=1) for x in tf.split(batch_t0["actions"], 4, axis=1)]
discrete_positions_target = [tf.squeeze(x, axis=1) for x in tf.split(batch_t1["positions"], 4, axis=1)]

# One predictor per joint: it sees the joint's current position and action
# (2 * N_DISCRETE inputs) and predicts the next position (N_DISCRETE outputs).
joint_predictors = [predictor_maker(2 * N_DISCRETE, N_DISCRETE) for _ in discrete_actions]
inps = [tf.concat([p, a], axis=1) for p, a in zip(discrete_positions, discrete_actions)]
outs = [joint_predictor(inp) for inp, joint_predictor in zip(inps, joint_predictors)]
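
The test script ends here in the listing. A hedged sketch of an evaluation pass, assuming `mse` builds a TensorFlow op and that `args.network_path` holds a standard tf.train.Saver checkpoint:

# Sketch only: checkpoint handling and the error ops are assumptions.
errors = [mse(o, t) for o, t in zip(outs, discrete_positions_target)]  # one scalar per joint

saver = tf.train.Saver()
with tf.Session() as sess:
    saver.restore(sess, tf.train.latest_checkpoint(args.network_path))
    sess.run(iterator.initializer)
    print(sess.run(errors))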
Example #4
parser.add_argument('-o', '--output',  # flag name assumed: the start of this call is missing
                    type=str,
                    default="../data_out/networks")
parser.add_argument('-r', '--restore-net', type=str, default=None)

args = parser.parse_args()


def uint8_to_float32(x):
    # Map uint8 pixels in [0, 255] to float32 values in [-1, 1].
    return tf.cast((x / 255) * 2 - 1, tf.float32)


def float32_to_uint8(x):
    # Inverse mapping: float32 values in [-1, 1] back to uint8 pixels in [0, 255].
    return np.clip(255 * (x + 1) / 2, 0, 255).astype(np.uint8)
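
A quick numeric check of the two helpers (illustrative values, computed in plain numpy rather than through the graph):

x = np.array([0, 128, 255], dtype=np.uint8)
y = (x / 255) * 2 - 1              # what uint8_to_float32 computes
print(y)                           # approx. [-1., 0.0039, 1.]
print(float32_to_uint8(y))         # [  0 128 255]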


dataset = database.get_dataset(args.input)
dataset = dataset.map(uint8_to_float32)
dataset = dataset.shuffle(5000)
dataset = dataset.repeat()
dataset = dataset.batch(args.batch_size)
iterator = dataset.make_initializable_iterator()
inp = iterator.get_next()

# Layer specification for a convolutional autoencoder, with each layer encoded
# as a "<kind>_outdim<channels>_size<kernel>_stride<stride>_act_<activation>" string.
keys = [
    "conv_outdim32_size8_stride4_act_tanh",
    "conv_outdim64_size10_stride5_act_tanh", "flatten",
    "fc_outdim500_act_tanh", "fc_outdim768_act_tanh", "reshape_6_8_16",
    "deconv_outdim64_size10_stride5_act_tanh",
    "deconv_outdim3_size8_stride4_act_none"
]
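
A hypothetical helper, not part of the original code, showing how such a key string could be decoded into layer parameters:

import re

def parse_layer_key(key):
    # Decode conv/deconv keys; other kinds (flatten, fc_..., reshape_...) fall through.
    m = re.match(r"(conv|deconv)_outdim(\d+)_size(\d+)_stride(\d+)_act_(\w+)", key)
    if m:
        kind, outdim, size, stride, act = m.groups()
        return {"kind": kind, "outdim": int(outdim), "size": int(size),
                "stride": int(stride), "activation": act}
    return {"kind": key}

print(parse_layer_key("conv_outdim32_size8_stride4_act_tanh"))
# {'kind': 'conv', 'outdim': 32, 'size': 8, 'stride': 4, 'activation': 'tanh'}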
Example #5
parser.add_argument('-d',
                    '--n-discrete',
                    type=int,
                    default=60,
                    help="Discretization precision.")

args = parser.parse_args()

path = args.input
batch_size = args.batch_size
buffer_size = batch_size * 5
N_DISCRETE = args.n_discrete
tactile_map_length = np.load(args.input + "/tactile_map/length.npy")

dataset_t0 = database.get_dataset(path,
                                  positions=True,
                                  actions=True,
                                  tactile_map=True)
dataset_t0 = dataset_t0.map(database.discretize_dataset(N_DISCRETE))  # bin continuous values into N_DISCRETE classes
dataset_t1 = dataset_t0.skip(1)  # next-timestep targets
dataset = tf.data.Dataset.zip((dataset_t0, dataset_t1))  # pair each sample with its successor
dataset = dataset.repeat()
dataset = dataset.batch(batch_size)
dataset = dataset.shuffle(buffer_size=100)  # shuffles whole batches, so (t, t+1) pairs stay aligned
dataset = dataset.prefetch(10)

iterator = dataset.make_initializable_iterator()
batch_t0, batch_t1 = iterator.get_next()

# size = tf.reduce_prod(tf.shape(batch_t0["positions"])[1:])  # dynamic alternative
size = 4 * N_DISCRETE  # 4 joints, each discretized over N_DISCRETE bins
discrete_positions = tf.reshape(batch_t0["positions"], (-1, size))
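
The listing cuts off after the reshape. A hedged continuation, reusing `predictor_maker` and `mse` the way the surrounding examples do (both are assumptions here):

# Sketch only: mirrors the pattern of the other examples on this page.
discrete_positions_target = tf.reshape(batch_t1["positions"], (-1, size))
predictor = predictor_maker(size, size)
out = predictor(discrete_positions)
loss = mse(out, discrete_positions_target)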
Example #6
import parsers
import time
import os
import datetime

import numpy as np
import tensorflow as tf

import database
from networks import mse


parser = parsers.train_parser
args = parser.parse_args()


path = args.input
batch_size = args.batch_size
buffer_size = batch_size * 5

dataset = database.get_dataset(path, vision=True)
dataset = dataset.map(database.vision_to_float32)
dataset = dataset.prefetch(5 * batch_size)
dataset = dataset.shuffle(buffer_size=buffer_size)
dataset = dataset.batch(batch_size)
dataset = dataset.repeat()

iterator = dataset.make_initializable_iterator()
batch = iterator.get_next()

size = np.prod(batch["vision"].get_shape().as_list()[1:])  # flattened frame size
vision = tf.reshape(batch["vision"], [-1, size])

# First fully connected layer: flattened frame -> 300 hidden units.
W1 = tf.Variable(tf.truncated_normal(shape=(size, 300), stddev=0.01))
B1 = tf.Variable(tf.zeros(shape=(300,)))
layer1 = tf.nn.relu(tf.matmul(vision, W1) + B1)
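
The listing stops after the first layer. A hedged sketch completing a simple autoencoder on top of it; the output layer, loss, and loop below are assumptions, with `mse` assumed to build a TensorFlow op:

# Sketch only: everything below is an assumption, not the original code.
W2 = tf.Variable(tf.truncated_normal(shape=(300, size), stddev=0.01))
B2 = tf.Variable(tf.zeros(shape=(size,)))
reconstruction = tf.matmul(layer1, W2) + B2

loss = mse(reconstruction, vision)
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(iterator.initializer)
    for step in range(1000):
        _, current_loss = sess.run([train_op, loss])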