Example #1
    def test_module_export(self, tmpdir, input_example, module_name, df_type):
        """ Tests the module export.

            Args:
                tmpdir: Fixture which will provide a temporary directory.

                input_example: Input to be passed to TaylorNet.

                module_name: Name of the module (section in config file).

                df_type: Parameter denoting type of export to be tested.
        """
        # Create neural module instance.
        module = NeuralModule.import_from_config(
            "tests/configs/test_deploy_export.yaml", module_name)
        # Generate filename in the temporary directory.
        tmp_file_name = str(tmpdir.mkdir("export").join(module_name))
        input_example = input_example.cuda() if input_example is not None else input_example
        # Test export.
        self.__test_export_route(
            module=module,
            out_name=tmp_file_name,
            mode=df_type,
            input_example=input_example,
        )
Example #2
    def test_incompatible_import_leaf_module(self, tmpdir):
        """
            Tests that a module can instantiate a copy of itself by loading a
            configuration file, and that importing that configuration through an
            incompatible class raises an error.

            Args:
                tmpdir: Fixture which will provide a temporary directory.
        """

        # params = {"int": 123, "float": 12.4, "string": "ala ma kota", "bool": True}
        orig_module = TestNeuralModuleImport.SecondSimpleModule(["No", "way", "dude!"], None)

        # Generate filename in the temporary directory.
        tmp_file_name = str(tmpdir.mkdir("export").join("incompatible_import_leaf.yml"))
        # Export.
        orig_module.export_to_config(tmp_file_name)

        # This will actually create an instance of SecondSimpleModule - OK.
        new_module = NeuralModule.import_from_config(tmp_file_name)
        # Compare class types.
        assert type(orig_module).__name__ == type(new_module).__name__

        # This will create an instance of SecondSimpleModule, not FirstSimpleModule - SO NOT OK!!
        with pytest.raises(ImportError):
            _ = TestNeuralModuleImport.FirstSimpleModule.import_from_config(tmp_file_name)
Example #3
 def __call__(self, force_pt=False, *input, **kwargs):
     pt_call = len(input) > 0 or force_pt
     if pt_call:
         with t.no_grad():
             return self.forward(*input, **kwargs)
     else:
         return NeuralModule.__call__(self, **kwargs)
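Several of the __call__ overrides collected here follow the same dual-dispatch pattern: positional inputs (or force_pt=True) trigger an immediate PyTorch forward pass, while keyword-only calls fall back to NeuralModule.__call__, which wires the module into NeMo's lazy execution graph. Below is a minimal, self-contained sketch of that idea; GraphBase and ToyModule are hypothetical stand-ins, not NeMo classes.

import torch
import torch.nn as nn

class GraphBase:
    # Stand-in for NeuralModule: a real framework would record the call
    # into a lazy execution graph here instead of computing anything.
    def __call__(self, **kwargs):
        return ("graph-node", kwargs)

class ToyModule(GraphBase, nn.Module):
    def __init__(self):
        nn.Module.__init__(self)
        self.linear = nn.Linear(8, 2)

    def forward(self, x):
        return self.linear(x)

    def __call__(self, force_pt=False, *input, **kwargs):
        # Positional tensors (or force_pt=True) -> eager PyTorch execution;
        # keyword-only calls -> graph-building path of the base class.
        pt_call = len(input) > 0 or force_pt
        if pt_call:
            with torch.no_grad():
                return self.forward(*input, **kwargs)
        return GraphBase.__call__(self, **kwargs)

m = ToyModule()
print(m(force_pt=True, x=torch.randn(4, 8)).shape)  # eager path: torch.Size([4, 2])
print(m(x="placeholder")[0])  # graph path: 'graph-node'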
Example #4
    def test_simple_import_root_neural_module(self, tmpdir):
        """
            Tests whether the Neural Module can instantiate a simple module by loading a configuration file.

            Args:
                tmpdir: Fixture which will provide a temporary directory.
        """

        # params = {"int": 123, "float": 12.4, "string": "ala ma kota", "bool": True}
        orig_module = TestNeuralModuleImport.FirstSimpleModule(123, 12.4, "ala ma kota", True)

        # Generate filename in the temporary directory.
        tmp_file_name = str(tmpdir.mkdir("export").join("simple_import_root.yml"))
        # Export.
        orig_module.export_to_config(tmp_file_name)

        # Import and create the new object.
        new_module = NeuralModule.import_from_config(tmp_file_name)

        # Compare class types.
        assert type(orig_module).__name__ == type(new_module).__name__

        # Compare objects - by all their params.
        param_keys = orig_module.init_params.keys()
        for key in param_keys:
            assert orig_module.init_params[key] == new_module.init_params[key]
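The export/import round trip exercised by these tests (dump the constructor parameters to a YAML file, re-import, then compare init_params) can be illustrated with a framework-agnostic sketch. SimpleModule below is a hypothetical stand-in that only mimics the general idea, not NeMo's actual serialization format.

import yaml  # requires PyYAML

class SimpleModule:
    def __init__(self, int_p, float_p, string_p):
        # Record the constructor parameters, mimicking NeMo's init_params.
        self.init_params = {"int_p": int_p, "float_p": float_p, "string_p": string_p}

    def export_to_config(self, path):
        with open(path, "w") as f:
            yaml.safe_dump({"class": type(self).__name__, "init_params": self.init_params}, f)

    @classmethod
    def import_from_config(cls, path):
        with open(path) as f:
            cfg = yaml.safe_load(f)
        return cls(**cfg["init_params"])

orig = SimpleModule(123, 12.4, "ala ma kota")
orig.export_to_config("/tmp/simple_module.yml")
copy = SimpleModule.import_from_config("/tmp/simple_module.yml")
assert orig.init_params == copy.init_params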
Example #5
    def __init__(self, name=None):
        NeuralModule.__init__(self, name)  # For NeuralModule API
        self._device = get_cuda_device(self.placement)

        # if 'batch_size' not in kwargs:
        #    logging.warning("No batch_size specified in the data layer. "
        #                    "Setting batch_size to 1.")
        #    kwargs['batch_size'] = 1

        # Set default values of variables used by the trainer / passed to DataLoader.
        # NOTE: This also means that those are parameters of DataLoader/trainer, not DataLayer.
        # Thus those fields will be removed from DataLayer and moved to the trainer configuration
        # (when the time for that comes ;))
        self._batch_size = 1
        self._num_workers = os.cpu_count()  # Use all CPUs by default.
        self._shuffle = False  # Don't shuffle by default.
Example #6
    def __instantiate_modules(
        self,
        preprocessor_params,
        encoder_params,
        decoder_params,
        spec_augment_params=None,
    ):
        preprocessor = NeuralModule.deserialize(preprocessor_params)
        encoder = NeuralModule.deserialize(encoder_params)
        decoder = NeuralModule.deserialize(decoder_params)
        if hasattr(decoder, 'vocabulary'):
            self.__vocabulary = decoder.vocabulary
        else:
            self.__vocabulary = None

        if spec_augment_params is not None:
            spec_augmentation = NeuralModule.deserialize(spec_augment_params)
        else:
            spec_augmentation = None

        # Record all modules
        self._modules = []
        self._preprocessor = preprocessor
        self._spec_augmentation = spec_augmentation
        self._encoder = encoder
        self._decoder = decoder
        if spec_augmentation is not None:
            self._modules += [
                preprocessor, spec_augmentation, encoder, decoder
            ]
        else:
            self._modules += [preprocessor, encoder, decoder]

        # Create input and output ports
        self._input_ports = preprocessor.input_ports
        self._output_ports = decoder.output_ports
        self._output_ports['encoded_lengths'] = encoder.output_ports[
            'encoded_lengths']
        return self._preprocessor, self._spec_augmentation, self._encoder, self._decoder
Example #7
 def __call__(self, force_pt=False, *input, **kwargs):
     pt_call = len(input) > 0 or force_pt
     if pt_call:
         # [inds] = kwargs.values()
         inds = kwargs["indices"]
         np_inds = inds.detach().cpu().numpy().tolist()
         result = []
         for lst in np_inds:
             sublst = []
             for tid in lst:
                 if tid != 1:
                     sublst.append(tid)
                 else:
                     break
             result.append(self._detokenizer(sublst))
         return result
     else:
         return NeuralModule.__call__(self, **kwargs)
Example #8
    def test_simple_import_root_neural_module(self):
        """ Tests whether the Neural Module can instantiate a simple module by loading a configuration file."""

        # params = {"int": 123, "float": 12.4, "string": "ala ma kota", "bool": True}
        orig_module = NeuralModuleImportTest.FirstSimpleModule(123, 12.4, "ala ma kota", True)

        # Export.
        orig_module.export_to_config("/tmp/first_simple_import.yml")

        # Import and create the new object.
        new_module = NeuralModule.import_from_config("/tmp/first_simple_import.yml")

        # Compare class types.
        self.assertEqual(type(orig_module).__name__, type(new_module).__name__)

        # Compare objects - by all their params.
        param_keys = orig_module.init_params.keys()
        for key in param_keys:
            self.assertEqual(orig_module.init_params[key], new_module.init_params[key])
Example #9
    def test_incompatible_import_leaf_module(self):
        """
            Tests that a module can instantiate a copy of itself by loading a
            configuration file, and that importing that configuration through an
            incompatible class raises an error.
        """

        # params = {"int": 123, "float": 12.4, "string": "ala ma kota", "bool": True}
        orig_module = NeuralModuleImportTest.SecondSimpleModule(["No", "way", "dude!"], None)

        # Export.
        orig_module.export_to_config("/tmp/second_simple_import.yml")

        # This will actually create an instance of SecondSimpleModule - OK.
        new_module = NeuralModule.import_from_config("/tmp/second_simple_import.yml")
        # Compare class types.
        self.assertEqual(type(orig_module).__name__, type(new_module).__name__)

        # This will create an instance of SecondSimpleModule, not FirstSimpleModule - SO NOT OK!!
        with self.assertRaises(ImportError):
            _ = NeuralModuleImportTest.FirstSimpleModule.import_from_config("/tmp/second_simple_import.yml")
Example #10
 def __call__(self, force_pt=False, *input, **kwargs):
     pt_call = len(input) > 0 or force_pt
     if pt_call:
         # [inds] = kwargs.values()
         # np_inds = inds.detach().cpu().numpy().reshape(-1)
         # result = [self._ids2classes[i] for i in np_inds]
         # #result = list(map(lambda x: self._ids2classes[x], np_inds))
         # return result
         inds = kwargs["indices"]
         np_inds = inds.detach().transpose_(1, 0).cpu().numpy().tolist()
         result = []
         for lst in np_inds:
             sublst = []
             for tid in lst:
                 if tid != 1:
                     sublst.append(tid)
                 else:
                     break
             result.append(list(map(lambda x: self._ids2classes[x],
                                    sublst)))
         return [result]
     else:
         return NeuralModule.__call__(self, **kwargs)
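The inner loops in Examples #7 and #10 implement a simple cut-at-stop-token decode: token ids are collected row by row until id 1 is hit (which these modules evidently treat as the end-of-sequence id), then mapped back to strings or labels. A standalone sketch of that logic with a made-up label map:

import torch

STOP_ID = 1  # assumption: id 1 marks the end of a sequence in these examples
ids2classes = {0: "O", 2: "B-LOC", 3: "I-LOC"}  # made-up label map

indices = torch.tensor([[2, 3, 0, 1, 0],
                        [0, 2, 1, 1, 1]])

result = []
for row in indices.cpu().numpy().tolist():
    kept = []
    for tid in row:
        if tid == STOP_ID:
            break
        kept.append(ids2classes[tid])
    result.append(kept)

print(result)  # [['B-LOC', 'I-LOC', 'O'], ['O', 'B-LOC']]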
Example #11
 def __init__(self, detokenizer=None):
     NeuralModule.__init__(self)
     self._detokenizer = detokenizer
Example #12
    def __init__(self, ids2classes=None):
        NeuralModule.__init__(self)

        if ids2classes is None:
            ids2classes = {}
        self._ids2classes = ids2classes
Example #13
 def __init__(self, name=None):
     NeuralModule.__init__(self, name)  # For NeuralModule API
     self._device = get_cuda_device(self.placement)
     # Set module type.
     self._type = ModuleType.nontrainable
Example #14
                                          f_name="cos",
                                          x_lo=-1,
                                          x_hi=1,
                                          batch_size=128)

# Instantiate a simple feed-forward, single layer neural network.
fx = nemo.tutorials.TaylorNet(dim=4)

# Instantiate the loss.
mse_loss = nemo.tutorials.MSELoss()

# Export the model configuration.
fx.export_to_config("/tmp/taylor_net.yml")

# Create a second instance, using the parameters loaded from the previously created configuration.
fx2 = NeuralModule.import_from_config("/tmp/taylor_net.yml")

# Create a graph by connecting the outputs with inputs of modules.
x, y = dl()
# Please note that the graph uses the "second" instance.
p = fx2(x=x)
loss = mse_loss(predictions=p, target=y)

# SimpleLossLoggerCallback will print loss values to console.
callback = nemo.core.SimpleLossLoggerCallback(
    tensors=[loss],
    print_func=lambda x: logging.info(f'Train Loss: {str(x[0].item())}'))

# Invoke the "train" action.
nf.train([loss],
         callbacks=[callback],
Example #15
 def __init__(self):
     NeuralModule.__init__(self)
Example #16
 def __call__(self, force_pt=False, *input, **kwargs):
     if force_pt:
         return self._loss_function(**kwargs)
     else:
         return NeuralModule.__call__(self, **kwargs)
Example #17
 def __init__(self, **kwargs):
     # Neural Module API specific
     NeuralModule.__init__(self, **kwargs)
Example #18
 def __init__(self):
     NeuralModule.__init__(self)  # For NeuralModule API
     self._device = get_cuda_device(self.placement)
Example #19
    def __init__(self, ids2classes=None, **kwargs):
        NeuralModule.__init__(self, **kwargs)

        if ids2classes is None:
            ids2classes = {}
        self._ids2classes = ids2classes
Example #20
 def __call__(self, *input, force_pt=False, **kwargs):
     pt_call = len(input) > 0 or force_pt
     if pt_call:
         return nn.Module.__call__(self, *input, **kwargs)
     else:
         return NeuralModule.__call__(self, **kwargs)
Example #21
 def __init__(self, detokenizer=None, **kwargs):
     NeuralModule.__init__(self, **kwargs)
     # self._sp_decoder = self.local_parameters.get("sp_decoder", {})
     self._detokenizer = detokenizer
Example #22
File: toys.py Project: benhoff/NeMo
 def __init__(self):
     # Neural Module API specific
     NeuralModule.__init__(self)
     # End of Neural Module API specific
     self._criterion = nn.CrossEntropyLoss()
Example #23
File: toys.py Project: phymucs/NeMo
 def __init__(self):
     # Neural Module API specific
     NeuralModule.__init__(self)