Code example #1
# Imports inferred from the function body. World and grid_desc_to_tensor are
# project-specific helpers assumed to be defined elsewhere in this codebase;
# Variable and the volatile flag below are the pre-0.4 PyTorch autograd API.
import random

import torch
from torch.autograd import Variable


def get_minibatch(dataset, sp_idx, batch_size,
                  start_idx, end_idx, pad_idx,
                  nb_ios, shuffle=True, volatile_vars=False):
    """Prepare minibatch."""

    # Prepare the grids
    grid_descriptions = dataset["sources"][sp_idx:sp_idx+batch_size]
    inp_grids = []
    out_grids = []
    inp_worlds = []
    out_worlds = []
    inp_test_worlds = []
    out_test_worlds = []
    for sample in grid_descriptions:
        if shuffle:
            random.shuffle(sample)
        sample_inp_grids = []
        sample_out_grids = []
        sample_inp_worlds = []
        sample_out_worlds = []
        sample_test_inp_worlds = []
        sample_test_out_worlds = []
        for inp_grid_desc, out_grid_desc in sample[:nb_ios]:

            # Do the inp_grid
            inp_grid = grid_desc_to_tensor(inp_grid_desc)
            # Do the out_grid
            out_grid = grid_desc_to_tensor(out_grid_desc)

            sample_inp_grids.append(inp_grid)
            sample_out_grids.append(out_grid)
            sample_inp_worlds.append(World.fromPytorchTensor(inp_grid))
            sample_out_worlds.append(World.fromPytorchTensor(out_grid))
        for inp_grid_desc, out_grid_desc in sample[nb_ios:]:
            # Do the inp_grid
            inp_grid = grid_desc_to_tensor(inp_grid_desc)
            # Do the out_grid
            out_grid = grid_desc_to_tensor(out_grid_desc)
            sample_test_inp_worlds.append(World.fromPytorchTensor(inp_grid))
            sample_test_out_worlds.append(World.fromPytorchTensor(out_grid))

        sample_inp_grids = torch.stack(sample_inp_grids, 0)
        sample_out_grids = torch.stack(sample_out_grids, 0)
        inp_grids.append(sample_inp_grids)
        out_grids.append(sample_out_grids)
        inp_worlds.append(sample_inp_worlds)
        out_worlds.append(sample_out_worlds)
        inp_test_worlds.append(sample_test_inp_worlds)
        out_test_worlds.append(sample_test_out_worlds)
    inp_grids = Variable(torch.stack(inp_grids, 0), volatile=volatile_vars)
    out_grids = Variable(torch.stack(out_grids, 0), volatile=volatile_vars)

    # Prepare the target sequences
    targets = dataset["targets"][sp_idx:sp_idx+batch_size]

    lines = [
        [start_idx] + line for line in targets
    ]
    lens = [len(line) for line in lines]
    max_len = max(lens)

    # Drop the last element, which should be the <end> symbol for all of them,
    # and pad every line out to the same length
    input_lines = [
        line[:max_len-1] + [pad_idx] * (max_len - len(line[:max_len-1])-1) for line in lines
    ]
    # Drop the first element, which should always be the <start> symbol. This
    # makes everything shifted by one compared to input_lines
    output_lines = [
        line[1:] + [pad_idx] * (max_len - len(line)) for line in lines
    ]

    in_tgt_seq = Variable(torch.LongTensor(input_lines), volatile=volatile_vars)
    out_tgt_seq = Variable(torch.LongTensor(output_lines), volatile=volatile_vars)

    return inp_grids, out_grids, in_tgt_seq, input_lines, out_tgt_seq, \
        inp_worlds, out_worlds, targets, inp_test_worlds, out_test_worlds
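
A minimal sketch (not part of the original source) of how get_minibatch is
typically consumed in a training loop; the karel_dataset dict, the vocab
index lookup, the batch size, and the commented-out model call are all
illustrative assumptions.

batch_size = 32
for sp_idx in range(0, len(karel_dataset["sources"]), batch_size):
    (inp_grids, out_grids, in_tgt_seq, in_tgt_lines, out_tgt_seq,
     inp_worlds, out_worlds, targets,
     inp_test_worlds, out_test_worlds) = get_minibatch(
        karel_dataset, sp_idx, batch_size,
        start_idx=vocab["<s>"], end_idx=vocab["</s>"], pad_idx=vocab["<pad>"],
        nb_ios=5, shuffle=True, volatile_vars=False)
    # The grid tensors feed the encoder, in_tgt_seq feeds the decoder, and
    # out_tgt_seq is the shifted target for the cross-entropy loss, e.g.:
    # logits = model(inp_grids, out_grids, in_tgt_seq)
    # loss = criterion(logits.view(-1, vocab_size), out_tgt_seq.view(-1))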
Code example #2
basic instructions that Karel can execute as well as the conditions on the
world that he can test. In this implementation, all Karel commands should be
sandwiched between the begin_karel_program() and end_karel_program()
statements, as illustrated in the comment block after this docstring.

    Author: Sonny Chan
    Date:   August 2018
"""

import sys

from karel import constants, drawing
from karel.world import World
from karel.editor import Editor

_world = World()
_karel = _world.karel

_wait = 250.0
_allow_edit = False

# location increments for moving north, east, south, and west, respectively
_step_avenue = [0, 1, 0, -1]
_step_street = [1, 0, -1, 0]
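# Worked example (assuming the direction indexing implied by the comment
# above, i.e. 0 = north, 1 = east, 2 = south, 3 = west): a karel facing east
# would step from (street, avenue) to
# (street + _step_street[1], avenue + _step_avenue[1]) == (street, avenue + 1),
# one avenue to the east with the street unchanged.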


def begin_karel_program():
    global _karel
    global _wait

    # if the first command-line argument is specified, assume it is a world file