Example 1
 def test_parameter_sharing_nested_scopes(self):
     # Test parameter sharing
     with scope.NameScope('global_scope'):
         with ParameterSharing({'model_b': 'model_a'}):
             param_global = parameter_sharing_context.get_parameter_name('w')
             self.assertEquals(param_global, 'global_scope/w')
             # This scope is overridden to match 'model_a'
             with scope.NameScope('model_b'):
                 with ParameterSharing({'shared_scope': ''}):
                     param_4 = parameter_sharing_context.get_parameter_name(
                         'w')
                     self.assertEquals(param_4, 'global_scope/model_a/w')
                     with scope.NameScope('shared_scope'):
                         param_5 = parameter_sharing_context.\
                             get_parameter_name('w')
                         self.assertEquals(param_5, 'global_scope/model_a/w')
             # This scope is supposed to have no sharing
             with scope.NameScope('model_c'):
                 with ParameterSharing({'shared_scope': ''}):
                     param_4 = parameter_sharing_context.get_parameter_name(
                         'w')
                     self.assertEquals(param_4, 'global_scope/model_c/w')
                     with scope.NameScope('shared_scope'):
                         param_5 = parameter_sharing_context.\
                             get_parameter_name('w')
                         self.assertEquals(param_5, 'global_scope/model_c/w')
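These test snippets omit their imports. A minimal set that should make them runnable, assuming the usual Caffe2 Python module layout (the exact paths are an assumption, not part of the excerpt):

    from caffe2.python import scope
    from caffe2.python.modeling.parameter_sharing import (
        ParameterSharing,
        parameter_sharing_context,
    )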
Example 3
 def test_parameter_sharing_default_scopes(self):
     # Test the default scopes with no sharing
     param_1 = parameter_sharing_context.get_parameter_name('w')
     self.assertEquals(param_1, 'w')
     with scope.NameScope('scope'):
         param_2 = parameter_sharing_context.get_parameter_name('w')
         self.assertEquals(param_2, 'scope/w')
         with scope.NameScope('scope_2'):
             param_3 = parameter_sharing_context.get_parameter_name('w')
             self.assertEquals(param_3, 'scope/scope_2/w')
Example 5
 def test_parameter_sharing_subscopes(self):
     # Sharing only one of the subscopes
     with ParameterSharing({'global_scope/b': 'global_scope/a'}):
         with scope.NameScope('global_scope'):
             param_6 = parameter_sharing_context.get_parameter_name('w')
             self.assertEquals(param_6, 'global_scope/w')
             with scope.NameScope('a'):
                 param_7 = parameter_sharing_context.get_parameter_name('w')
                 self.assertEquals(param_7, 'global_scope/a/w')
             with scope.NameScope('b'):
                 param_8 = parameter_sharing_context.get_parameter_name('w')
                 self.assertEquals(param_8, 'global_scope/a/w')
             with scope.NameScope('c'):
                 param_9 = parameter_sharing_context.get_parameter_name('w')
                 self.assertEquals(param_9, 'global_scope/c/w')
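Comparing this test with Example 1 suggests that the override keys are resolved against the NameScope that is active when the ParameterSharing context is entered: here the context is opened at the top level, so full scope paths are used. A small sketch of the contrast, using the same import assumptions as above and only results already asserted by these tests:

    from caffe2.python import scope
    from caffe2.python.modeling.parameter_sharing import (
        ParameterSharing,
        parameter_sharing_context,
    )

    # Absolute keys: the context is entered outside any NameScope (this example).
    with ParameterSharing({'global_scope/b': 'global_scope/a'}):
        with scope.NameScope('global_scope'):
            with scope.NameScope('b'):
                assert parameter_sharing_context.get_parameter_name('w') == \
                    'global_scope/a/w'

    # Relative keys: the context is entered inside 'global_scope' (Example 1).
    with scope.NameScope('global_scope'):
        with ParameterSharing({'b': 'a'}):
            with scope.NameScope('b'):
                assert parameter_sharing_context.get_parameter_name('w') == \
                    'global_scope/a/w'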
Example 7
    def create_param(self, param_name, shape, initializer, optimizer=None,
                     ps_param=None):
        if isinstance(param_name, core.BlobReference):
            param_name = str(param_name)
        elif isinstance(param_name, six.string_types):
            # The parameter name is resolved against the current NameScope,
            # taking the parameter sharing of the scopes into account.
            param_name = parameter_sharing_context.get_parameter_name(
                param_name)
        else:
            raise "Unsupported type for param_name"

        param_blob = core.BlobReference(param_name)

        if len(initializer) == 1:
            init_op_args = {}
        else:
            assert len(initializer) == 2
            init_op_args = initializer[1]
        if shape is not None:
            init_op_args.update({'shape': shape})

        param = layers.LayerParameter(
            parameter=param_blob,
            initializer=core.CreateOperator(
                initializer[0],
                [],
                param_blob,
                **init_op_args
            ),
            optimizer=optimizer,
            ps_param=ps_param,
        )

        return param
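For reference, the initializer argument of this method is a tuple whose first element is a fill-operator name and whose optional second element holds that operator's keyword arguments, e.g. ('XavierFill',) or ('ConstantFill', {'value': 0.0}). A standalone sketch (plain Python, not Caffe2 code) of how the snippet above unpacks it:

    def unpack_initializer(initializer, shape):
        # Mirrors Example 7: element 0 is the op name, element 1 its kwargs.
        # Note that Example 7 updates the caller's kwargs dict in place;
        # Example 8 below deep-copies it first.
        init_op_args = {} if len(initializer) == 1 else initializer[1]
        if shape is not None:
            init_op_args.update({'shape': shape})
        return initializer[0], init_op_args

    print(unpack_initializer(('XavierFill',), [64, 128]))
    # -> ('XavierFill', {'shape': [64, 128]})
    print(unpack_initializer(('ConstantFill', {'value': 0.0}), [64]))
    # -> ('ConstantFill', {'value': 0.0, 'shape': [64]})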
Example 8
    def create_param(self, param_name, shape, initializer, optimizer=None,
                     ps_param=None, regularizer=None):
        if isinstance(param_name, core.BlobReference):
            param_name = str(param_name)
        elif isinstance(param_name, six.string_types):
            # The parameter name is resolved against the current NameScope,
            # taking the parameter sharing of the scopes into account.
            param_name = parameter_sharing_context.get_parameter_name(
                param_name)
        else:
            raise ValueError("Unsupported type for param_name")

        param_blob = core.BlobReference(param_name)

        if len(initializer) == 1:
            init_op_args = {}
        else:
            assert len(initializer) == 2
            init_op_args = copy.deepcopy(initializer[1])
        if shape is not None:
            assert 'shape' not in init_op_args
            init_op_args.update({'shape': shape})

        initializer_op = None
        if self._initialize_params:
            initializer_op = core.CreateOperator(
                initializer[0],
                [],
                param_blob,
                **init_op_args
            )

        param = layers.LayerParameter(
            parameter=param_blob,
            initializer=initializer_op,
            optimizer=optimizer,
            ps_param=ps_param,
            regularizer=regularizer
        )

        self._validate_param_shape(param_name, shape)

        self._validate_param_optim(param_name, optimizer)

        self._param_to_shape[param_name] = shape

        return param
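The main additions over Example 7 are the deep copy of the initializer kwargs, the shape/optimizer validation, and the optional skipping of the initializer op. A small standalone illustration (plain Python, not Caffe2 code) of why the deep copy matters when one initializer tuple is reused for several parameters:

    import copy

    shared = ('ConstantFill', {'value': 0.0})  # reused for two parameters

    # Example 7's behaviour: the shared kwargs dict is mutated in place.
    args_v7 = shared[1]
    args_v7.update({'shape': [3, 4]})
    print(shared)  # ('ConstantFill', {'value': 0.0, 'shape': [3, 4]})

    # Example 8's behaviour: the caller's dict stays untouched, so reusing
    # `shared` for a second parameter does not leak the first shape.
    shared = ('ConstantFill', {'value': 0.0})
    args_v8 = copy.deepcopy(shared[1])
    args_v8.update({'shape': [3, 4]})
    print(shared)  # ('ConstantFill', {'value': 0.0})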
Example 9
    def create_param(self, param_name, shape, initializer, tags=None):
        """
        Creates a parameter with a given name and initializer.

        If param_name is an instance of BlobReference, that blob is used to
        store the parameter directly (no sharing logic affects its location).

        If param_name is an instance of a string type, the final blob is
        created in the CurrentNameScope with respect to all parameter sharing
        logic, i.e. 'resolved_name_scope/param_name'.

        Parameter sharing logic overrides CurrentNameScope according to the
        rules specified through ParameterSharing contexts; all ParameterSharing
        contexts are applied recursively until no extra overrides remain, and
        on each step the best match is applied first.

        The following examples should clarify how the ParameterSharing logic
        works when this function is called with parameter 'w':
        a. Call from some scope 'global_scope' with no parameter sharing:
          'global_scope/w'
        b. Call from scope 'scope_b', with override {'scope_b': 'scope_a'}:
          'scope_a/w'
        c. Call from scope 'scope_a', with override {'scope_a': ''}:
          'scope_a/w'
        d. Call from scope 'scope_b/shared', with overrides
          {'scope_b/shared': 'scope_b', 'scope_b': 'scope_a'}:
          'scope_a/w'
        e. Call from scope 'scope_b/unshared', with overrides
          {'scope_b/shared': 'scope_b', 'scope_b': 'scope_a'}:
          'scope_a/unshared/w'
        """
        # ParameterSharing applies only when param_name is an instance of a
        # string type. If param_name is a BlobReference, no ParameterSharing
        # is attempted.
        if isinstance(param_name, core.BlobReference):
            param_name = str(param_name)
        elif isinstance(param_name, six.string_types):
            # The parameter name is resolved against the current NameScope,
            # taking the parameter sharing of the scopes into account.
            param_name = parameter_sharing_context.get_parameter_name(
                param_name)
        else:
            raise "Unsupported type for param_name"

        if param_name in self._parameters_info:
            assert self._parameters_info[param_name].shape == shape
            return self._parameters_info[param_name].blob

        param_info = initializer.create_param(
            param_name=core.BlobReference(param_name),
            init_net=self.param_init_net,
            shape=shape,
        )
        optim_context = OptimizerContext.current()
        for tag in self._normalize_tags(tags):
            if optim_context.has_optimizer(tag):
                # param_info will check optimizer has not been set
                param_info.optimizer = optim_context.get_optimizer(tag)
        if not param_info.optimizer and optim_context.has_optimizer(DEFAULT_OPTIM):
            param_info.optimizer = optim_context.get_optimizer(DEFAULT_OPTIM)

        reg_context = RegularizerContext.current()
        param_info.regularizer = reg_context

        self._parameters_info[param_name] = param_info
        # Add param to legacy structs as well, so all other functions for
        # parameters are still working.
        self.AddParameter(param_info.blob, tags)
        return param_info.blob
Example 10
    def create_param(self, param_name, shape, initializer, tags=None):
        """
        Creates a parameter with a given name and initializer.

        If param_name is an instance of BlobReference, that blob is used to
        store the parameter directly (no sharing logic affects its location).

        If param_name is an instance of a string type, the final blob is
        created in the CurrentNameScope with respect to all parameter sharing
        logic, i.e. 'resolved_name_scope/param_name'.

        Parameter sharing logic overrides CurrentNameScope according to the
        rules specified through ParameterSharing contexts; all ParameterSharing
        contexts are applied recursively until no extra overrides remain, and
        on each step the best match is applied first.

        The following examples should clarify how the ParameterSharing logic
        works when this function is called with parameter 'w':
        a. Call from some scope 'global_scope' with no parameter sharing:
          'global_scope/w'
        b. Call from scope 'scope_b', with override {'scope_b': 'scope_a'}:
          'scope_a/w'
        c. Call from scope 'scope_a', with override {'scope_a': ''}:
          'scope_a/w'
        d. Call from scope 'scope_b/shared', with overrides
          {'scope_b/shared': 'scope_b', 'scope_b': 'scope_a'}:
          'scope_a/w'
        e. Call from scope 'scope_b/unshared', with overrides
          {'scope_b/shared': 'scope_b', 'scope_b': 'scope_a'}:
          'scope_a/unshared/w'
        """
        # ParameterSharing applies only when param_name is an instance of a
        # string type. If param_name is a BlobReference, no ParameterSharing
        # is attempted.
        if isinstance(param_name, core.BlobReference):
            param_name = str(param_name)
        elif isinstance(param_name, six.string_types):
            # The parameter name is resolved against the current NameScope,
            # taking the parameter sharing of the scopes into account.
            param_name = parameter_sharing_context.get_parameter_name(
                param_name)
        else:
            raise TypeError("Unsupported type for param_name")

        if param_name in self._parameters_info:
            assert self._parameters_info[param_name].shape == shape
            return self._parameters_info[param_name].blob

        param_info = initializer.create_param(
            param_name=core.BlobReference(param_name),
            init_net=self.param_init_net,
            shape=shape,
        )
        optim_context = OptimizerContext.current()
        for tag in self._normalize_tags(tags):
            if optim_context.has_optimizer(tag):
                # param_info will check optimizer has not been set
                param_info.optimizer = optim_context.get_optimizer(tag)
        if not param_info.optimizer and optim_context.has_optimizer(
                DEFAULT_OPTIM):
            param_info.optimizer = optim_context.get_optimizer(DEFAULT_OPTIM)

        reg_context = RegularizerContext.current()
        param_info.regularizer = reg_context

        self._parameters_info[param_name] = param_info
        # Add param to legacy structs as well, so all other functions for
        # parameters are still working.
        self.AddParameter(param_info.blob, tags)
        return param_info.blob
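For context, here is a minimal usage sketch of this method that ties it back to the scope tests above. The module paths and the Initializer helper are assumptions based on the Caffe2 Python layout, not part of the original excerpt:

    from caffe2.python import scope
    from caffe2.python.model_helper import ModelHelper
    from caffe2.python.modeling.initializers import Initializer
    from caffe2.python.modeling.parameter_sharing import ParameterSharing

    model = ModelHelper(name="sharing_example")
    with scope.NameScope('global_scope'):
        with ParameterSharing({'b': 'a'}):
            with scope.NameScope('a'):
                w_a = model.create_param(
                    'w', shape=[4, 4], initializer=Initializer('XavierFill'))
            with scope.NameScope('b'):
                # Resolves to 'global_scope/a/w', so create_param returns the
                # blob registered by the first call instead of a new one.
                w_b = model.create_param(
                    'w', shape=[4, 4], initializer=Initializer('XavierFill'))

    assert str(w_a) == str(w_b) == 'global_scope/a/w'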