Example #1
  def __init__(self, **kwargs):
    """Initializes the local UBM-GMM tool chain with the given file selector object"""
#    logger.warn("This class must be checked. Please verify that I didn't do any mistake here. I had to rename 'train_projector' into a 'train_enroller'!")
    # initialize the UBMGMM base class
    GMM.__init__(self, **kwargs)
    # register a different set of functions in the Tool base class
    Algorithm.__init__(self, requires_enroller_training = True, performs_projection = False)
Example #2
    def __init__(self, **kwargs):
        """Generates a test value that is read and written"""

        # call base class constructor registering that this tool performs everything.
        Algorithm.__init__(self,
                           performs_projection=False,
                           requires_enroller_training=True)
Example #3
    def __init__(
        self,
        # JFA training
        subspace_dimension_of_u,  # U subspace dimension
        subspace_dimension_of_v,  # V subspace dimension
        jfa_training_iterations=10,  # Number of EM iterations for the JFA training
        # JFA enrollment
        jfa_enroll_iterations=1,  # Number of iterations for the enrollment phase
        # parameters of the GMM
        **kwargs):
        """Initializes the local UBM-GMM tool with the given file selector object"""
        # call base class constructor
        GMM.__init__(self, **kwargs)

        # call tool constructor to overwrite what was set before
        Algorithm.__init__(self,
                           performs_projection=True,
                           use_projected_features_for_enrollment=True,
                           requires_enroller_training=True,
                           subspace_dimension_of_u=subspace_dimension_of_u,
                           subspace_dimension_of_v=subspace_dimension_of_v,
                           jfa_training_iterations=jfa_training_iterations,
                           jfa_enroll_iterations=jfa_enroll_iterations,
                           multiple_model_scoring=None,
                           multiple_probe_scoring=None,
                           **kwargs)

        self.subspace_dimension_of_u = subspace_dimension_of_u
        self.subspace_dimension_of_v = subspace_dimension_of_v
        self.jfa_training_iterations = jfa_training_iterations
        self.jfa_enroll_iterations = jfa_enroll_iterations
        self.jfa_trainer = bob.learn.em.JFATrainer()
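
For orientation, a hypothetical instantiation of this JFA tool might look as follows. The class name JFA and the concrete values are assumptions for illustration only; any GMM parameters (such as number_of_gaussians) are forwarded to the base class via **kwargs.

    # Hypothetical usage sketch -- class name and values are illustrative only.
    jfa = JFA(
        subspace_dimension_of_u=10,   # session (U) subspace dimension
        subspace_dimension_of_v=5,    # client (V) subspace dimension
        jfa_training_iterations=10,   # EM iterations for JFA training
        number_of_gaussians=256,      # consumed by the GMM base class
    )
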
Example #4
    def __init__(
        self,
        # ISV training
        subspace_dimension_of_u,  # U subspace dimension
        isv_training_iterations=10,  # Number of EM iterations for the ISV training
        # ISV enrollment
        isv_enroll_iterations=1,  # Number of iterations for the enrollment phase
        multiple_probe_scoring=None,  # scoring when multiple probe files are available

        # parameters of the GMM
        **kwargs):
        """Initializes the local UBM-GMM tool with the given file selector object"""
        # call base class constructor with its set of parameters
        GMM.__init__(self, **kwargs)

        # call tool constructor to overwrite what was set before
        Algorithm.__init__(
            self,
            performs_projection=True,
            use_projected_features_for_enrollment=True,
            requires_enroller_training=False,  # not needed anymore because it's done while training the projector
            split_training_features_by_client=True,
            subspace_dimension_of_u=subspace_dimension_of_u,
            isv_training_iterations=isv_training_iterations,
            isv_enroll_iterations=isv_enroll_iterations,
            multiple_model_scoring=None,
            multiple_probe_scoring=multiple_probe_scoring,
            **kwargs)

        self.subspace_dimension_of_u = subspace_dimension_of_u
        self.isv_training_iterations = isv_training_iterations
        self.isv_enroll_iterations = isv_enroll_iterations
        self.isv_trainer = bob.learn.em.ISVTrainer(self.relevance_factor)
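
A similar hedged sketch for the ISV variant (class name and values are assumptions, not taken from the original code); note that enroller training is no longer required here because it happens while training the projector.

    # Hypothetical usage sketch -- class name and values are illustrative only.
    isv = ISV(
        subspace_dimension_of_u=50,   # session (U) subspace dimension
        isv_training_iterations=10,   # EM iterations for ISV training
        multiple_probe_scoring=None,  # keep the joint-likelihood probe scoring
        number_of_gaussians=512,      # consumed by the GMM base class
    )
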
Example #5
  def __init__(self, **kwargs):
    """Initializes the local UBM-GMM tool chain with the given file selector object"""
#    logger.warn("This class must be checked. Please verify that I didn't do any mistake here. I had to rename 'train_projector' into a 'train_enroller'!")
    # initialize the UBMGMM base class
    GMM.__init__(self, **kwargs)
    # register a different set of functions in the Tool base class
    Algorithm.__init__(self, requires_enroller_training = True, performs_projection = False)
Example #6
    def __init__(self, **kwargs):
        """Generates a test value that is read and written"""

        # call base class constructor registering that this tool performs everything.
        Algorithm.__init__(
            self,
            performs_projection=False,
            requires_enroller_training=True
        )
Example #7
    def __init__(self,
                 distance_function=bob.math.chi_square,
                 is_distance_function=True,
                 multiple_probe_scoring='average'):

        # call base class constructor
        Algorithm.__init__(self,
                           distance_function=str(distance_function),
                           is_distance_function=is_distance_function,
                           multiple_model_scoring=None,
                           multiple_probe_scoring=multiple_probe_scoring)

        # remember distance function
        self.distance_function = distance_function
        self.factor = -1. if is_distance_function else 1
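
The factor stored above is what turns a distance into a score: distances are negated so that larger values always mean a better match. A minimal sketch of how such a score method typically uses it (assuming model and probe are plain numpy arrays of equal shape):

    # Sketch only -- not part of the original example.
    def score(self, model, probe):
        # factor is -1. for distance functions, so smaller distances give larger scores
        return self.factor * self.distance_function(model, probe)
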
Example #8
    def __init__(self,
                 machine_type='C_SVC',
                 kernel_type='LINEAR',
                 C=1,
                 **kwargs):

        # initialize the UBMGMM base class
        GMMRegular.__init__(self, **kwargs)
        # register a different set of functions in the Tool base class
        Algorithm.__init__(self,
                           requires_enroller_training=True,
                           performs_projection=False)

        self.machine_type = machine_type
        self.kernel_type = kernel_type
        self.C = C
Example #9
  def __init__(
      self,
      distance_function = bob.math.chi_square,
      is_distance_function = True,
      multiple_probe_scoring = 'average'
  ):

    # call base class constructor
    Algorithm.__init__(
        self,

        distance_function = str(distance_function),
        is_distance_function = is_distance_function,

        multiple_model_scoring = None,
        multiple_probe_scoring = multiple_probe_scoring
    )

    # remember distance function
    self.distance_function = distance_function
    self.factor =  -1. if is_distance_function else 1
Example #10
  def __init__(
      self,
      # ISV training
      subspace_dimension_of_u,       # U subspace dimension
      isv_training_iterations = 10,  # Number of EM iterations for the ISV training
      # ISV enrollment
      isv_enroll_iterations = 1,     # Number of iterations for the enrollment phase

      multiple_probe_scoring = None, # scoring when multiple probe files are available

      # parameters of the GMM
      **kwargs
  ):
    """Initializes the local UBM-GMM tool with the given file selector object"""
    # call base class constructor with its set of parameters
    GMM.__init__(self, **kwargs)

    # call tool constructor to overwrite what was set before
    Algorithm.__init__(
        self,
        performs_projection = True,
        use_projected_features_for_enrollment = True,
        requires_enroller_training = False, # not needed anymore because it's done while training the projector
        split_training_features_by_client = True,

        subspace_dimension_of_u = subspace_dimension_of_u,
        isv_training_iterations = isv_training_iterations,
        isv_enroll_iterations = isv_enroll_iterations,

        multiple_model_scoring = None,
        multiple_probe_scoring = multiple_probe_scoring,
        **kwargs
    )

    self.subspace_dimension_of_u = subspace_dimension_of_u
    self.isv_training_iterations = isv_training_iterations
    self.isv_enroll_iterations = isv_enroll_iterations
    self.isv_trainer = bob.learn.em.ISVTrainer(self.relevance_factor)
Example #11
  def __init__(
      self,
      # JFA training
      subspace_dimension_of_u,       # U subspace dimension
      subspace_dimension_of_v,       # V subspace dimension
      jfa_training_iterations = 10,  # Number of EM iterations for the JFA training
      # JFA enrollment
      jfa_enroll_iterations = 1,     # Number of iterations for the enrollment phase
      # parameters of the GMM
      **kwargs
  ):
    """Initializes the local UBM-GMM tool with the given file selector object"""
    # call base class constructor
    GMM.__init__(self, **kwargs)

    # call tool constructor to overwrite what was set before
    Algorithm.__init__(
        self,
        performs_projection = True,
        use_projected_features_for_enrollment = True,
        requires_enroller_training = True,

        subspace_dimension_of_u = subspace_dimension_of_u,
        subspace_dimension_of_v = subspace_dimension_of_v,
        jfa_training_iterations = jfa_training_iterations,
        jfa_enroll_iterations = jfa_enroll_iterations,

        multiple_model_scoring = None,
        multiple_probe_scoring = None,
        **kwargs
    )

    self.subspace_dimension_of_u = subspace_dimension_of_u
    self.subspace_dimension_of_v = subspace_dimension_of_v
    self.jfa_training_iterations = jfa_training_iterations
    self.jfa_enroll_iterations = jfa_enroll_iterations
    self.jfa_trainer = bob.learn.em.JFATrainer()
Example #12
  def score_for_multiple_probes(self, model, probes):
    """This function computes the score between the given model and several given probe files."""
    assert isinstance(model, bob.learn.em.ISVMachine)
    [self._check_projected(probe) for probe in probes]
    if self.probe_fusion_function is not None:
      # When a multiple probe fusion function is selected, use it
      return Algorithm.score_for_multiple_probes(self, model, probes)
    else:
      # Otherwise: compute joint likelihood of all probe features
      # create GMM statistics from first probe statistics
#      import pdb; pdb.set_trace()
      gmmstats_acc = bob.learn.em.GMMStats(probes[0][0])
#      gmmstats_acc = probes[0][0]
      # add all other probe statistics
      for i in range(1,len(probes)):
        gmmstats_acc += probes[i][0]
      # compute ISV score with the accumulated statistics
      projected_isv_acc = numpy.ndarray(shape=(self.ubm.shape[0]*self.ubm.shape[1],), dtype=numpy.float64)
      model.estimate_ux(gmmstats_acc, projected_isv_acc)
      return model.forward_ux(gmmstats_acc, projected_isv_acc)
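
A hedged usage sketch of the method above (all names are assumptions): model is the bob.learn.em.ISVMachine obtained at enrollment, and each probe is the projected-feature pair produced by the tool's own projection step, whose first element is a bob.learn.em.GMMStats object.

    # Hypothetical call -- assumes `algorithm` is an instance of the class above.
    score = algorithm.score_for_multiple_probes(model, [probe_1, probe_2, probe_3])
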
Example #13
  def score_for_multiple_probes(self, model, probes):
    """This function computes the score between the given model and several given probe files."""
    assert isinstance(model, bob.learn.em.ISVMachine)
    [self._check_projected(probe) for probe in probes]
    if self.probe_fusion_function is not None:
      # When a multiple probe fusion function is selected, use it
      return Algorithm.score_for_multiple_probes(self, model, probes)
    else:
      # Otherwise: compute joint likelihood of all probe features
      # create GMM statistics from first probe statistics
#      import pdb; pdb.set_trace()
      gmmstats_acc = bob.learn.em.GMMStats(probes[0][0])
#      gmmstats_acc = probes[0][0]
      # add all other probe statistics
      for i in range(1,len(probes)):
        gmmstats_acc += probes[i][0]
      # compute ISV score with the accumulated statistics
      projected_isv_acc = numpy.ndarray(shape=(self.ubm.shape[0]*self.ubm.shape[1],), dtype=numpy.float64)
      model.estimate_ux(gmmstats_acc, projected_isv_acc)
      return model.forward_ux(gmmstats_acc, projected_isv_acc)
Example #14
  def __init__(
      self,
      # IVector training
      subspace_dimension_of_t,       # T subspace dimension
      tv_training_iterations = 25,   # Number of EM iterations for the TV (i-vector) training
      update_sigma = True,
      use_whitening = True,
      use_lda = False,
      use_wccn = False,
      use_plda = False,
      lda_dim = 50,
      plda_dim_F  = 50,
      plda_dim_G = 50,
      plda_training_iterations = 50,
      # parameters of the GMM
      **kwargs
  ):
    """Initializes the local GMM tool with the given file selector object"""
    # call base class constructor with its set of parameters
    GMM.__init__(self, **kwargs)

    # call tool constructor to overwrite what was set before
    Algorithm.__init__(
        self,
        performs_projection = True,
        use_projected_features_for_enrollment = True,
        requires_enroller_training = False, # not needed anymore because it's done while training the projector
        split_training_features_by_client = True,

        subspace_dimension_of_t = subspace_dimension_of_t,
        tv_training_iterations = tv_training_iterations,
        update_sigma = update_sigma,
        use_whitening = use_whitening,
        use_lda = use_lda,
        use_wccn = use_wccn,
        use_plda = use_plda,
        lda_dim = lda_dim,
        plda_dim_F  = plda_dim_F,
        plda_dim_G = plda_dim_G,
        plda_training_iterations = plda_training_iterations,

        multiple_model_scoring = None,
        multiple_probe_scoring = None,
        **kwargs
    )

    self.update_sigma = update_sigma
    self.use_whitening = use_whitening
    self.use_lda = use_lda
    self.use_wccn = use_wccn
    self.use_plda = use_plda
    self.subspace_dimension_of_t = subspace_dimension_of_t
    self.tv_training_iterations = tv_training_iterations

    self.ivector_trainer = bob.learn.em.IVectorTrainer(update_sigma=update_sigma)
    self.whitening_trainer = bob.learn.linear.WhiteningTrainer()

    self.lda_dim = lda_dim
    self.lda_trainer = bob.learn.linear.FisherLDATrainer(strip_to_rank=False)
    self.wccn_trainer = bob.learn.linear.WCCNTrainer()
    self.plda_trainer = bob.learn.em.PLDATrainer()
    self.plda_dim_F  = plda_dim_F
    self.plda_dim_G = plda_dim_G
    self.plda_training_iterations = plda_training_iterations
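
A hypothetical configuration sketch (class name and values are not from the original code): an i-vector setup with whitening and LDA enabled but PLDA disabled; the GMM parameters again pass through **kwargs.

    # Hypothetical usage sketch -- class name and values are illustrative only.
    ivector = IVector(
        subspace_dimension_of_t=400,  # total-variability (T) subspace dimension
        tv_training_iterations=25,
        use_whitening=True,
        use_lda=True,
        lda_dim=50,
        use_plda=False,
        number_of_gaussians=256,      # consumed by the GMM base class
    )
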
Example #15
  def __init__(
      self,
      # IVector training
      subspace_dimension_of_t,       # T subspace dimension
      tv_training_iterations = 25,   # Number of EM iterations for the TV (i-vector) training
      update_sigma = True,
      use_whitening = True,
      use_lda = False,
      use_wccn = False,
      use_plda = False,
      lda_dim = 50,
      plda_dim_F  = 50,
      plda_dim_G = 50,
      plda_training_iterations = 50,
      # parameters of the GMM
      **kwargs
  ):
    """Initializes the local GMM tool with the given file selector object"""
    # call base class constructor with its set of parameters
    GMM.__init__(self, **kwargs)

    # call tool constructor to overwrite what was set before
    Algorithm.__init__(
        self,
        performs_projection = True,
        use_projected_features_for_enrollment = True,
        requires_enroller_training = False, # not needed anymore because it's done while training the projector
        split_training_features_by_client = True,

        subspace_dimension_of_t = subspace_dimension_of_t,
        tv_training_iterations = tv_training_iterations,
        update_sigma = update_sigma,
        use_whitening = use_whitening,
        use_lda = use_lda,
        use_wccn = use_wccn,
        use_plda = use_plda,
        lda_dim = lda_dim,
        plda_dim_F  = plda_dim_F,
        plda_dim_G = plda_dim_G,
        plda_training_iterations = plda_training_iterations,

        multiple_model_scoring = None,
        multiple_probe_scoring = None,
        **kwargs
    )

    self.update_sigma = update_sigma
    self.use_whitening = use_whitening
    self.use_lda = use_lda
    self.use_wccn = use_wccn
    self.use_plda = use_plda
    self.subspace_dimension_of_t = subspace_dimension_of_t
    self.tv_training_iterations = tv_training_iterations
    
    self.ivector_trainer = bob.learn.em.IVectorTrainer(update_sigma=update_sigma)
    self.whitening_trainer = bob.learn.linear.WhiteningTrainer()
    
    self.lda_dim = lda_dim
    self.lda_trainer = bob.learn.linear.FisherLDATrainer(strip_to_rank=False)
    self.wccn_trainer = bob.learn.linear.WCCNTrainer()
    self.plda_trainer = bob.learn.em.PLDATrainer()
    self.plda_dim_F  = plda_dim_F
    self.plda_dim_G = plda_dim_G
    self.plda_training_iterations = plda_training_iterations
Example #16
    def __init__(
            self,
            # parameters for the tool
            gabor_jet_similarity_type,
            multiple_feature_scoring='max_jet',
            # some similarity functions might need a GaborWaveletTransform class, so we have to provide the parameters here as well...
            gabor_directions=8,
            gabor_scales=5,
            gabor_sigma=2. * math.pi,
            gabor_maximum_frequency=math.pi / 2.,
            gabor_frequency_step=math.sqrt(.5),
            gabor_power_of_k=0,
            gabor_dc_free=True):

        # call base class constructor
        Algorithm.__init__(self,
                           gabor_jet_similarity_type=gabor_jet_similarity_type,
                           multiple_feature_scoring=multiple_feature_scoring,
                           gabor_directions=gabor_directions,
                           gabor_scales=gabor_scales,
                           gabor_sigma=gabor_sigma,
                           gabor_maximum_frequency=gabor_maximum_frequency,
                           gabor_frequency_step=gabor_frequency_step,
                           gabor_power_of_k=gabor_power_of_k,
                           gabor_dc_free=gabor_dc_free,
                           multiple_model_scoring=None,
                           multiple_probe_scoring=None)

        # the Gabor wavelet transform; used by (some of) the Gabor jet similarities
        gwt = bob.ip.gabor.Transform(number_of_scales=gabor_scales,
                                     number_of_directions=gabor_directions,
                                     sigma=gabor_sigma,
                                     k_max=gabor_maximum_frequency,
                                     k_fac=gabor_frequency_step,
                                     power_of_k=gabor_power_of_k,
                                     dc_free=gabor_dc_free)

        # jet comparison function
        self.similarity_function = bob.ip.gabor.Similarity(
            gabor_jet_similarity_type, gwt)

        # how to proceed with multiple features per model
        self.jet_scoring = {
            'average_model': None,  # compute an average model
            'average': numpy.average,  # compute the average similarity
            'min_jet': min,  # for each jet location, compute the minimum similarity
            'max_jet': max,  # for each jet location, compute the maximum similarity
            'med_jet': numpy.median,  # for each jet location, compute the median similarity
            'min_graph': numpy.average,  # for each model graph, compute the minimum average similarity
            'max_graph': numpy.average,  # for each model graph, compute the maximum average similarity
            'med_graph': numpy.average,  # for each model graph, compute the median average similarity
        }[multiple_feature_scoring]

        self.graph_scoring = {
            'average_model': None,  # compute an average model
            'average': numpy.average,  # compute the average similarity
            'min_jet': numpy.average,  # for each jet location, compute the minimum similarity
            'max_jet': numpy.average,  # for each jet location, compute the maximum similarity
            'med_jet': numpy.average,  # for each jet location, compute the median similarity
            'min_graph': min,  # for each model graph, compute the minimum average similarity
            'max_graph': max,  # for each model graph, compute the maximum average similarity
            'med_graph': numpy.median,  # for each model graph, compute the median average similarity
        }[multiple_feature_scoring]
Example #17
  def __init__(
      self,
      # parameters for the GMM
      number_of_gaussians,
      # parameters of UBM training
      kmeans_training_iterations = 25,   # Maximum number of iterations for K-Means
      gmm_training_iterations = 25,      # Maximum number of iterations for ML GMM Training
      training_threshold = 5e-4,         # Threshold to end the ML training
      variance_threshold = 5e-4,         # Minimum value that a variance can reach
      update_weights = True,
      update_means = True,
      update_variances = True,
      # parameters of the GMM enrollment
      relevance_factor = 4,         # Relevance factor as described in Reynolds paper
      gmm_enroll_iterations = 1,    # Number of iterations for the enrollment phase
      responsibility_threshold = 0, # If set, the weight of a particular Gaussian will at least be greater than this threshold. In the case the real weight is lower, the prior mean value will be used to estimate the current mean and variance.
      INIT_SEED = 5489,
      # scoring
      scoring_function = bob.learn.em.linear_scoring
  ):
    """Initializes the local UBM-GMM tool chain with the given file selector object"""

    # call base class constructor and register that this tool performs projection
    Algorithm.__init__(
        self,
        performs_projection = True,
        use_projected_features_for_enrollment = False,

        number_of_gaussians = number_of_gaussians,
        kmeans_training_iterations = kmeans_training_iterations,
        gmm_training_iterations = gmm_training_iterations,
        training_threshold = training_threshold,
        variance_threshold = variance_threshold,
        update_weights = update_weights,
        update_means = update_means,
        update_variances = update_variances,
        relevance_factor = relevance_factor,
        gmm_enroll_iterations = gmm_enroll_iterations,
        responsibility_threshold = responsibility_threshold,
        INIT_SEED = INIT_SEED,
        scoring_function = str(scoring_function),

        multiple_model_scoring = None,
        multiple_probe_scoring = 'average'
    )

    # copy parameters
    self.gaussians = number_of_gaussians
    self.kmeans_training_iterations = kmeans_training_iterations
    self.gmm_training_iterations = gmm_training_iterations
    self.training_threshold = training_threshold
    self.variance_threshold = variance_threshold
    self.update_weights = update_weights
    self.update_means = update_means
    self.update_variances = update_variances
    self.relevance_factor = relevance_factor
    self.gmm_enroll_iterations = gmm_enroll_iterations
    self.init_seed = INIT_SEED
    self.rng = bob.core.random.mt19937(self.init_seed)
    self.responsibility_threshold = responsibility_threshold
    self.scoring_function = scoring_function

    self.ubm = None
    self.kmeans_trainer = bob.learn.em.KMeansTrainer()
    self.ubm_trainer = bob.learn.em.ML_GMMTrainer(self.update_means, self.update_variances, self.update_weights, self.responsibility_threshold)
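
For context, a hypothetical instantiation of this UBM-GMM tool (class name and values are assumptions): 512 Gaussians with the MAP-adaptation defaults registered above.

    # Hypothetical usage sketch -- class name and values are illustrative only.
    gmm = GMM(
        number_of_gaussians=512,
        relevance_factor=4,        # MAP relevance factor (Reynolds)
        gmm_enroll_iterations=1,   # a single MAP iteration at enrollment
    )
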
Example #18
  def __init__(
      self,
      # parameters for the tool
      gabor_jet_similarity_type,
      multiple_feature_scoring = 'max_jet',
      # some similarity functions might need a GaborWaveletTransform class, so we have to provide the parameters here as well...
      gabor_directions = 8,
      gabor_scales = 5,
      gabor_sigma = 2. * math.pi,
      gabor_maximum_frequency = math.pi / 2.,
      gabor_frequency_step = math.sqrt(.5),
      gabor_power_of_k = 0,
      gabor_dc_free = True
  ):

    # call base class constructor
    Algorithm.__init__(
        self,

        gabor_jet_similarity_type = gabor_jet_similarity_type,
        multiple_feature_scoring = multiple_feature_scoring,
        gabor_directions = gabor_directions,
        gabor_scales = gabor_scales,
        gabor_sigma = gabor_sigma,
        gabor_maximum_frequency = gabor_maximum_frequency,
        gabor_frequency_step = gabor_frequency_step,
        gabor_power_of_k = gabor_power_of_k,
        gabor_dc_free = gabor_dc_free,

        multiple_model_scoring = None,
        multiple_probe_scoring = None
    )

    # the Gabor wavelet transform; used by (some of) the Gabor jet similarities
    gwt = bob.ip.gabor.Transform(
        number_of_scales = gabor_scales,
        number_of_directions = gabor_directions,
        sigma = gabor_sigma,
        k_max = gabor_maximum_frequency,
        k_fac = gabor_frequency_step,
        power_of_k = gabor_power_of_k,
        dc_free = gabor_dc_free
    )

    # jet comparison function
    self.similarity_function = bob.ip.gabor.Similarity(gabor_jet_similarity_type, gwt)

    # how to proceed with multiple features per model
    self.jet_scoring = {
        'average_model' : None, # compute an average model
        'average' : numpy.average, # compute the average similarity
        'min_jet' : min, # for each jet location, compute the minimum similarity
        'max_jet' : max, # for each jet location, compute the maximum similarity
        'med_jet' : numpy.median, # for each jet location, compute the median similarity
        'min_graph' : numpy.average, # for each model graph, compute the minimum average similarity
        'max_graph' : numpy.average, # for each model graph, compute the maximum average similarity
        'med_graph' : numpy.average, # for each model graph, compute the median average similarity
    }[multiple_feature_scoring]

    self.graph_scoring = {
        'average_model' : None, # compute an average model
        'average' : numpy.average, # compute the average similarity
        'min_jet' : numpy.average, # for each jet location, compute the minimum similarity
        'max_jet' : numpy.average, # for each jet location, compute the maximum similarity
        'med_jet' : numpy.average, # for each jet location, compute the median similarity
        'min_graph' : min, # for each model graph, compute the minimum average similarity
        'max_graph' : max, # for each model graph, compute the maximum average similarity
        'med_graph' : numpy.median, # for each model graph, compute the median average similarity
    }[multiple_feature_scoring]
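
To make the lookup above concrete: the two dictionaries map the chosen multiple_feature_scoring string to the reduction functions applied per jet location and per model graph. A small plain-Python illustration (values are made up) for the default 'max_jet':

    # Plain-Python illustration, no bob dependency; similarity values are hypothetical.
    # With 'max_jet', jet_scoring is max and graph_scoring is numpy.average.
    jet_similarities = [0.71, 0.85, 0.64]  # one probe jet vs. all model jets at a node
    node_score = max(jet_similarities)     # per-node reduction (jet_scoring)
    # the per-node scores are then averaged over the whole graph (graph_scoring)
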
Example #19
  def __init__(
      self,
      # parameters for the GMM
      number_of_gaussians,
      # parameters of UBM training
      kmeans_training_iterations = 25,   # Maximum number of iterations for K-Means
      gmm_training_iterations = 25,      # Maximum number of iterations for ML GMM Training
      training_threshold = 5e-4,         # Threshold to end the ML training
      variance_threshold = 5e-4,         # Minimum value that a variance can reach
      update_weights = True,
      update_means = True,
      update_variances = True,
      # parameters of the GMM enrollment
      relevance_factor = 4,         # Relevance factor as described in Reynolds paper
      gmm_enroll_iterations = 1,    # Number of iterations for the enrollment phase
      responsibility_threshold = 0, # If set, the weight of a particular Gaussian will at least be greater than this threshold. In the case the real weight is lower, the prior mean value will be used to estimate the current mean and variance.
      INIT_SEED = 5489,
      # scoring
      scoring_function = bob.learn.em.linear_scoring
  ):
    """Initializes the local UBM-GMM tool chain with the given file selector object"""

    # call base class constructor and register that this tool performs projection
    Algorithm.__init__(
        self,
        performs_projection = True,
        use_projected_features_for_enrollment = False,

        number_of_gaussians = number_of_gaussians,
        kmeans_training_iterations = kmeans_training_iterations,
        gmm_training_iterations = gmm_training_iterations,
        training_threshold = training_threshold,
        variance_threshold = variance_threshold,
        update_weights = update_weights,
        update_means = update_means,
        update_variances = update_variances,
        relevance_factor = relevance_factor,
        gmm_enroll_iterations = gmm_enroll_iterations,
        responsibility_threshold = responsibility_threshold,
        INIT_SEED = INIT_SEED,
        scoring_function = str(scoring_function),

        multiple_model_scoring = None,
        multiple_probe_scoring = 'average'
    )

    # copy parameters
    self.gaussians = number_of_gaussians
    self.kmeans_training_iterations = kmeans_training_iterations
    self.gmm_training_iterations = gmm_training_iterations
    self.training_threshold = training_threshold
    self.variance_threshold = variance_threshold
    self.update_weights = update_weights
    self.update_means = update_means
    self.update_variances = update_variances
    self.relevance_factor = relevance_factor
    self.gmm_enroll_iterations = gmm_enroll_iterations
    self.init_seed = INIT_SEED
    self.rng = bob.core.random.mt19937(self.init_seed)
    self.responsibility_threshold = responsibility_threshold
    self.scoring_function = scoring_function

    self.ubm = None
    self.kmeans_trainer = bob.learn.em.KMeansTrainer()
    self.ubm_trainer = bob.learn.em.ML_GMMTrainer(self.update_means, self.update_variances, self.update_weights, self.responsibility_threshold)