Example #1
0
    def _learnBlocking(self, data_d, eta, epsilon):
        """Learn a set of blocking predicates from the training pairs.

        Augments the negative training examples with semi-supervised
        non-duplicates, builds a TF-IDF document-frequency index over the
        full record strings, and trains a Blocking instance.

        Args:
            data_d: mapping of record id -> record (dict of field -> string).
            eta: blocking training parameter passed through to Blocking.
            epsilon: blocking training parameter passed through to Blocking.

        Returns:
            The learned blocking predicates from blocker.trainBlocking().
        """
        # NOTE(review): uses self.data_d here but the data_d *parameter*
        # below when building full_string_records — confirm this asymmetry
        # is intentional.
        confident_nonduplicates = blocking.semiSupervisedNonDuplicates(self.data_d,
                                                                       self.data_model)

        # Index 0 holds the non-duplicate (negative) training pairs.
        self.training_pairs[0].extend(confident_nonduplicates)

        predicate_functions = (predicates.wholeFieldPredicate,
                               predicates.tokenFieldPredicate,
                               predicates.commonIntegerPredicate,
                               predicates.sameThreeCharStartPredicate,
                               predicates.sameFiveCharStartPredicate,
                               predicates.sameSevenCharStartPredicate,
                               predicates.nearIntegersPredicate,
                               predicates.commonFourGram,
                               predicates.commonSixGram,
                               )

        tfidf_thresholds = [0.2, 0.4, 0.6, 0.8]

        # Concatenate every field of each record into one document string
        # (each field followed by a space, matching the original output
        # exactly). Field list is hoisted out of the loop; ''.join avoids
        # quadratic += string building.
        fields = self.data_model['fields'].keys()
        full_string_records = {}
        for record_id, record in data_d.iteritems():
            full_string_records[record_id] = ''.join(record[field] + ' '
                                                     for field in fields)

        self.df_index = tfidf.documentFrequency(full_string_records)

        blocker = blocking.Blocking(self.training_pairs,
                                    predicate_functions,
                                    self.data_model,
                                    tfidf_thresholds,
                                    self.df_index,
                                    eta,
                                    epsilon
                                    )

        learned_predicates = blocker.trainBlocking()

        return learned_predicates
Example #2
0
    def _learnBlocking(self, data_d):
        """Learn a set of blocking predicates from the training pairs.

        Augments the negative training examples with semi-supervised
        non-duplicates and trains a Blocking instance over a fixed set
        of predicate functions.

        Args:
            data_d: mapping of record id -> record (unused here beyond
                the signature; non-duplicates come from self.data_d).

        Returns:
            The learned blocking predicates from blocker.trainBlocking().
        """
        confident_nonduplicates = blocking.semiSupervisedNonDuplicates(self.data_d,
                                                                       self.data_model)

        # Index 0 holds the non-duplicate (negative) training pairs.
        self.training_pairs[0].extend(confident_nonduplicates)

        predicate_functions = (wholeFieldPredicate,
                               tokenFieldPredicate,
                               commonIntegerPredicate,
                               sameThreeCharStartPredicate,
                               sameFiveCharStartPredicate,
                               sameSevenCharStartPredicate,
                               nearIntegersPredicate,
                               commonFourGram,
                               commonSixGram,
                               )

        blocker = blocking.Blocking(self.training_pairs,
                                    predicate_functions,
                                    self.data_model)

        # Renamed from `predicates`, which shadows the predicates module
        # name used elsewhere in this project.
        learned_predicates = blocker.trainBlocking()

        return learned_predicates