Example #1
def layer_forward_hook(module, hook_inputs, hook_outputs=None):
    # hook_outputs is None when this runs as a forward pre-hook
    # (i.e. when attributing to the layer's inputs).
    device = _extract_device(module, hook_inputs, hook_outputs)
    # Mirror the shape of the hooked value: check whether the layer's
    # output (or input, for a pre-hook) is a tuple.
    is_layer_tuple = (isinstance(hook_outputs, tuple)
                      if hook_outputs is not None
                      else isinstance(hook_inputs, tuple))
    if is_layer_tuple:
        # Replace the layer value with the full tuple of inputs
        # scattered to this device.
        return scattered_inputs_dict[device]
    # Single (non-tuple) layer value: unwrap the one-element tuple.
    return scattered_inputs_dict[device][0]
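
Because hook_outputs defaults to None, the same function can be registered either as a PyTorch forward pre-hook (called with (module, inputs)) or as a regular forward hook (called with (module, inputs, outputs)). A minimal sketch of how registration might look; layer and attribute_to_layer_input are hypothetical stand-ins for the surrounding context:

import torch.nn as nn

def attach_hook(layer: nn.Module, attribute_to_layer_input: bool):
    # Hypothetical sketch: pick the hook type based on whether we
    # attribute to the layer's inputs or its outputs.
    if attribute_to_layer_input:
        # Pre-hook: fires before forward(), receives (module, inputs);
        # a non-None return value replaces the layer's inputs.
        return layer.register_forward_pre_hook(layer_forward_hook)
    # Forward hook: fires after forward(), receives
    # (module, inputs, outputs); a non-None return replaces the outputs.
    return layer.register_forward_hook(layer_forward_hook)
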
Example #2
def forward_hook(module, inp, out=None):
    # out is None when this runs as a forward pre-hook.
    device = _extract_device(module, inp, out)
    # Check whether the hooked layer value is a tuple so the return
    # value can mirror its shape.
    is_layer_tuple = (isinstance(out, tuple) if out is not None
                      else isinstance(inp, tuple))
    if device not in all_layer_inputs:
        raise AssertionError(
            "Layer input not placed on appropriate "
            "device. If using a DataParallel model, either provide the "
            "DataParallel model as forward_func or provide device ids"
            " to the constructor.")
    if not is_layer_tuple:
        # Single (non-tuple) layer value: unwrap the one-element tuple.
        return all_layer_inputs[device][0]
    return all_layer_inputs[device]
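
Here all_layer_inputs is a closure variable mapping each device to the layer inputs previously captured on that replica; under nn.DataParallel, each GPU replica fires its hooks on its own device. A hypothetical sketch of how such a dictionary might be populated (capture_inputs_hook is an illustrative name, not part of the original code):

all_layer_inputs = {}

def capture_inputs_hook(module, inp):
    # Hypothetical sketch: store the inputs keyed by the device they
    # live on, so each DataParallel replica records its own copy.
    inputs = inp if isinstance(inp, tuple) else (inp,)
    all_layer_inputs[inputs[0].device] = inputs

# e.g. layer.register_forward_pre_hook(capture_inputs_hook)
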
Example #3
def layer_forward_hook(module,
                       hook_inputs,
                       hook_outputs=None,
                       layer_idx=0):
    device = _extract_device(module, hook_inputs, hook_outputs)
    is_layer_tuple = (
        isinstance(hook_outputs, tuple)
        # hook_outputs is None if attribute_to_layer_input == True
        if hook_outputs is not None else isinstance(hook_inputs, tuple))

    if is_layer_tuple:
        # Slice this layer's inputs out of the flat per-device tuple
        # using the cumulative offsets in num_outputs_cumsum.
        return scattered_inputs_dict[device][
            num_outputs_cumsum[layer_idx]:num_outputs_cumsum[layer_idx + 1]]

    # Single (non-tuple) layer value: return the one tensor belonging
    # to this layer.
    return scattered_inputs_dict[device][num_outputs_cumsum[layer_idx]]
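
This variant hooks several layers at once: the per-device inputs live in one flat tuple, and num_outputs_cumsum holds cumulative offsets so each hook slices out only its own layer's tensors. A small runnable illustration of the indexing, with made-up sizes:

import torch

# Hypothetical sketch: two hooked layers taking 2 and 3 tensors each,
# so the cumulative offsets are [0, 2, 5].
num_outputs_cumsum = [0, 2, 5]
flat_inputs = tuple(torch.zeros(1) for _ in range(5))

layer0_inputs = flat_inputs[num_outputs_cumsum[0]:num_outputs_cumsum[1]]  # 2 tensors
layer1_inputs = flat_inputs[num_outputs_cumsum[1]:num_outputs_cumsum[2]]  # 3 tensors
assert len(layer0_inputs) == 2 and len(layer1_inputs) == 3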