def synthetic_pattern_variable_program(include_types=True):
    """A program that tests product types.

  Args:
    include_types: If False, we omit types on the variables, requiring a type
        inference pass.

  Returns:
    program: `instructions.Program`.
  """
    block = instructions.Block([
        instructions.prim_op(["inp"], "many", lambda x: (x + 1,
                                                         (x + 2, x + 3))),
        instructions.prim_op(["many"], ["one", "two"], lambda x: x),
    ], instructions.halt_op())

    leaf = instructions.TensorType(np.int64, ())
    the_vars = {
        "inp": instructions.Type(leaf),
        "many": instructions.Type((leaf, (leaf, leaf))),
        "one": instructions.Type(leaf),
        "two": instructions.Type((leaf, leaf)),
    }

    if not include_types:
        _strip_types(the_vars)
    return instructions.Program(instructions.ControlFlowGraph([block]), [],
                                the_vars, ["inp"], "two")
Example #2
def add_batch_dim_one_var(type_):
    # `new_batch_dim` is a free variable captured from the enclosing scope.
    return instructions.Type(
        instructions.pattern_map(
            lambda t: instructions.TensorType(t.dtype,
                                              (new_batch_dim,) + t.shape),
            type_.tensors,
            leaf_type=instructions.TensorType))
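A rough mental model of `pattern_map` (not the actual TFP implementation): it applies a function at every leaf of a nested tuple structure, where `leaf_type` decides what counts as a leaf. A minimal sketch with a stand-in `TensorType`:

```python
import collections

TensorType = collections.namedtuple('TensorType', ['dtype', 'shape'])

def pattern_map(f, pattern, leaf_type):
    # Check for leaves first: namedtuples are tuples, so the leaf test
    # must win over structural recursion.
    if isinstance(pattern, leaf_type):
        return f(pattern)
    return tuple(pattern_map(f, p, leaf_type) for p in pattern)

new_batch_dim = 4  # hypothetical batch size
nested = (TensorType('int64', ()), (TensorType('int64', (2,)),))
batched = pattern_map(
    lambda t: TensorType(t.dtype, (new_batch_dim,) + t.shape),
    nested, leaf_type=TensorType)
assert batched == (TensorType('int64', (4,)), (TensorType('int64', (4, 2)),))
```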
Example #3
def type_of_pattern(val, backend, preferred_type=None):
    """Returns the `instructions.Type` of `val`.

  Args:
    val: Pattern of backend-specific `Tensor`s or a Python or numpy constant.
    backend: Object implementing required backend operations.
    preferred_type: `instructions.Type` to prefer, if `t` is a constant.

  Returns:
    vm_type: Pattern of `instructions.TensorType` describing `t`
  """
    def at_leaf(preferred_leaf_type, obj):
        """Pattern match at a leaf of the preferred_type pattern."""
        if preferred_leaf_type is None:
            return instructions.pattern_map(backend.type_of, obj)
        if isinstance(preferred_leaf_type, instructions.TensorType):
            return backend.type_of(obj, preferred_leaf_type.dtype)
        # Otherwise, preferred_leaf_type must be a (nested) list or tuple of
        # TensorType, while obj is not a list or a tuple (of anything).  In this
        # case, pattern_map2 should have raised an error, but we can defensively
        # raise an error here as well.
        msg = 'Type mismatch: Expected structured type {}, got object {}.'.format(
            preferred_leaf_type, obj)
        raise ValueError(msg)

    if preferred_type is None:
        preferred_type = instructions.Type(None)
    return instructions.pattern_map2(at_leaf,
                                     preferred_type.tensors,
                                     val,
                                     leaf_type=instructions.TensorType)
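`pattern_map2` zips two patterns and calls the function at leaves of the first; a `None` leaf absorbs the whole matching subtree, which is why `at_leaf` above can receive all of `obj`. A minimal sketch under those assumptions (not the actual TFP implementation):

```python
import collections

TensorType = collections.namedtuple('TensorType', ['dtype', 'shape'])

def pattern_map2(f, pattern1, pattern2, leaf_type):
    # A leaf of pattern1 (None counts as a leaf) consumes the whole
    # matching subtree of pattern2.
    if pattern1 is None or isinstance(pattern1, leaf_type):
        return f(pattern1, pattern2)
    if not isinstance(pattern2, (list, tuple)) or len(pattern1) != len(pattern2):
        raise ValueError('Structure mismatch: {} vs {}'.format(pattern1, pattern2))
    return tuple(pattern_map2(f, p1, p2, leaf_type)
                 for p1, p2 in zip(pattern1, pattern2))

prefer = (TensorType('float32', ()), None)
val = (1.0, (2, 3))
out = pattern_map2(lambda p, v: (p, v), prefer, val, TensorType)
assert out == ((TensorType('float32', ()), 1.0), (None, (2, 3)))
```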
Example #4
    def module(self):
        """Returns the registered function definitions as an `instructions.Module`.

    Example:
    ```python
    ab = dsl.ProgramBuilder()

    with ab.function(...) as foo:
      ...  # Do stuff

    module = ab.module()
    ```

    Raises:
      ValueError: If invoked inside a function definition.

    Returns:
      module: The `instructions.Module` corresponding to all the definitions
        accumulated in this `ProgramBuilder`.
    """
        if self._blocks is not None:
            raise ValueError('Not finished defining function')
        msg = 'Internal invariant violation'
        assert self._locals is None, msg
        assert self._pending_after_else_block is None, msg
        var_defs = {str(var): inst.Type(None) for var in self._var_defs}
        return inst.Module(self._functions, var_defs)
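The guard at the top enforces a common builder invariant: `module()` is only legal once no function definition is open. A toy sketch of that state machine (the class and names here are hypothetical, not the real `ProgramBuilder` API):

```python
import contextlib

class SketchBuilder:
    """Toy builder illustrating the 'no open definition' invariant."""

    def __init__(self):
        self._blocks = None  # Non-None only while a definition is open.
        self._functions = []

    @contextlib.contextmanager
    def function(self, name):
        self._blocks = []          # Open a definition.
        yield self._blocks
        self._functions.append((name, self._blocks))
        self._blocks = None        # Close it again.

    def module(self):
        if self._blocks is not None:
            raise ValueError('Not finished defining function')
        return list(self._functions)

b = SketchBuilder()
with b.function('foo') as body:
    body.append('op')
assert b.module() == [('foo', ['op'])]
```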
Example #5
def shape_sequence_program(shape_sequence):
    """Program that writes into `answer` zeros having a sequence of shapes.

  This enables us to test that the final inferred shape is the broadcast of all
  intermediate shapes.

  Args:
    shape_sequence: The sequence of intermediate shapes.

  Returns:
    program: `instructions.Program` which returns an arbitrary value.
  """
    block_ops = []

    def op(shape, ans):
        # The trailing comma makes this a 1-tuple, matching the single-output
        # pattern ['ans'] below.
        return np.zeros(shape, dtype=np.array(ans).dtype),

    for shape in shape_sequence:
        # We use a partial instead of a lambda in order to capture a copy of shape.
        block_ops.append(
            instructions.prim_op(['ans'], ['ans'],
                                 functools.partial(op, shape)))
    shape_seq_block = instructions.Block(block_ops, instructions.halt_op())
    shape_seq_vars = {
        'ans': instructions.Type(None),
        instructions.pc_var: instructions.single_type(np.int64, ()),
    }
    return instructions.Program(
        instructions.ControlFlowGraph([shape_seq_block]), [], shape_seq_vars,
        ['ans'], ['ans'])
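The broadcast the docstring refers to can be checked directly in NumPy (`np.broadcast_shapes` needs NumPy >= 1.20):

```python
import functools
import numpy as np

shape_sequence = [(2, 1), (1, 3), (2, 3)]

# The final inferred shape should be the broadcast of all intermediate shapes.
final_shape = functools.reduce(np.broadcast_shapes, shape_sequence)
assert final_shape == (2, 3)
```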
Example #6
    def testClosingOverTensorDoesntRaise(self):
        x = tf.constant(0.)

        def f(y):
            return y * x

        arg_types = [inst.Type([inst.TensorType(shape=[], dtype=np.float32)])]
        TF_BACKEND.run_on_dummies(f, arg_types)
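In plain TensorFlow terms, the test asserts that calling `f` on a dummy of the declared type succeeds even though `f` closes over a tensor built elsewhere. A standalone approximation (not the actual `run_on_dummies` implementation):

```python
import numpy as np
import tensorflow as tf

x = tf.constant(0.)

def f(y):
    return y * x  # Closes over `x` from the enclosing scope.

# Build a dummy matching TensorType(shape=[], dtype=np.float32) and call f.
dummy = tf.zeros([], dtype=np.float32)
f(dummy)  # Should not raise.
```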
Example #7
def _merge_var(varname, obtained_type, inferred_types, backend):
    """Merges an updated auto-batching type for a single variable."""
    old_type = inferred_types[varname]
    new_type = instructions.pattern_map2(
        functools.partial(_merge_tensor_type, backend=backend),
        old_type.tensors, obtained_type,
        leaf_type=instructions.TensorType)
    inferred_types[varname] = instructions.Type(new_type)
    if old_type != inferred_types[varname]:
        log_debug('{}: {} -> {}'.format(varname, old_type,
                                        inferred_types[varname]))
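`_merge_tensor_type` is not shown here; one plausible contract, sketched with a toy `TensorType`, is that an unknown (`None`) entry is refined by the observed type while two known types must agree. This is an assumption, not the library's actual merge logic:

```python
import collections

TensorType = collections.namedtuple('TensorType', ['dtype', 'shape'])

def merge_tensor_type(old, obtained):
    # Toy contract: unknown (None) is refined by the observed type;
    # two known types must agree.
    if old is None:
        return obtained
    if obtained is not None and old != obtained:
        raise ValueError('Conflicting types: {} vs {}'.format(old, obtained))
    return old

assert merge_tensor_type(None, TensorType('int64', ())) == TensorType('int64', ())
assert merge_tensor_type(TensorType('int64', ()), None) == TensorType('int64', ())
```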
Example #8
def _strip_types(the_vars):
    """Resets every variable type, except the program counter, to unknown."""
    for k in the_vars:
        if k != instructions.pc_var:
            the_vars[k] = instructions.Type(None)