Example #1
import unittest

from abydos.distance import AverageLinkage, Prefix
from abydos.tokenizer import QGrams


class AverageLinkageTestCases(unittest.TestCase):
    """Test AverageLinkage functions.

    abydos.distance.AverageLinkage
    """

    cmp = AverageLinkage()
    cmp1 = AverageLinkage(tokenizer=QGrams(1))
    cmp_pfx = AverageLinkage(metric=Prefix())

    def test_average_linkage_dist(self):
        """Test abydos.distance.AverageLinkage.dist."""
        # Base cases
        self.assertEqual(self.cmp.dist('', ''), 0.0)
        self.assertEqual(self.cmp.dist('a', ''), 1.0)
        self.assertEqual(self.cmp.dist('', 'a'), 1.0)
        self.assertEqual(self.cmp.dist('abc', ''), 1.0)
        self.assertEqual(self.cmp.dist('', 'abc'), 1.0)
        self.assertEqual(self.cmp.dist('abc', 'abc'), 0.75)
        self.assertEqual(self.cmp.dist('abcd', 'efgh'), 0.96)

        self.assertAlmostEqual(self.cmp.dist('Nigel', 'Niall'), 0.8611111111)
        self.assertAlmostEqual(self.cmp.dist('Niall', 'Nigel'), 0.8611111111)
        self.assertAlmostEqual(self.cmp.dist('Colin', 'Coiln'), 0.8333333333)
        self.assertAlmostEqual(self.cmp.dist('Coiln', 'Colin'), 0.8333333333)
        self.assertAlmostEqual(self.cmp.dist('ATCAACGAGT', 'AACGATTAG'),
                               0.7545454545)

        self.assertEqual(self.cmp1.dist('aaa', 'aaa'), 0.0)
        self.assertAlmostEqual(self.cmp_pfx.dist('ababab', 'ab'), 0.714285714)
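A minimal usage sketch (nothing here beyond the values already asserted in Example #1): the same AverageLinkage distances can be reproduced by calling the class directly, outside of unittest.

# Hedged sketch: direct use of abydos.distance.AverageLinkage,
# mirroring the assertions in Example #1.
from abydos.distance import AverageLinkage

avg = AverageLinkage()
print(avg.dist('Nigel', 'Niall'))            # ~0.8611111111, as asserted above
print(avg.dist('ATCAACGAGT', 'AACGATTAG'))   # ~0.7545454545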
Example #2
    def __init__(self, model='latin'):
        self.model = model
        self.impH = input_helpers.InputHelper()
        self.ST = syllable_tokenizer.SyllableTokenizer()
        # Phonetic Encoder
        self.pe = Ainsworth()
        # Soundex Firstname Algorithm
        self.pshp_soundex_first = PSHPSoundexFirst()
        # String Distance algorithms
        self.algos = [
            IterativeSubString(),
            BISIM(),
            DiscountedLevenshtein(),
            Prefix(),
            LCSstr(),
            MLIPNS(),
            Strcmp95(),
            MRA(),
            Editex(),
            SAPS(),
            FlexMetric(),
            JaroWinkler(mode='Jaro'),
            HigueraMico(),
            Sift4(),
            Eudex(),
            ALINE(),
            Covington(),
            PhoneticEditDistance()
        ]
        self.algo_names = [
            'iterativesubstring', 'bisim', 'discountedlevenshtein', 'prefix',
            'lcsstr', 'mlipns', 'strcmp95', 'mra', 'editex', 'saps',
            'flexmetric', 'jaro', 'higueramico', 'sift4', 'eudex', 'aline',
            'covington', 'phoneticeditdistance'
        ]

        # extract the model tarball into the directory if it doesn't exist
        model_dir = os.path.join(os.path.dirname(__file__), "models",
                                 self.model)
        if not os.path.exists(model_dir):
            os.makedirs(model_dir)
            tar = tarfile.open(
                os.path.join(os.path.dirname(__file__), "models",
                             self.model + ".tar.gz"), "r:gz")
            tar.extractall(model_dir)
            tar.close()

        # String Distance Pipeline (Level 0/Base Model)
        self.baseModel = joblib.load(os.path.join(model_dir, 'base.pkl'))

        # Character Embedding Network (Level 0/Base Model)
        self.vocab = preprocess.VocabularyProcessor(
            max_document_length=15,
            min_frequency=0).restore(os.path.join(model_dir, 'vocab'))

        siamese_model = os.path.join(model_dir, 'siamese')

        graph = tf.Graph()
        with graph.as_default() as graph:
            self.sess = tf.Session()
            with self.sess.as_default():
                # Load the saved meta graph and restore variables
                saver = tf.train.import_meta_graph(
                    '{}.meta'.format(siamese_model))
                self.sess.run(tf.global_variables_initializer())
                saver.restore(self.sess, siamese_model)
            # Get the placeholders from the graph by name
            self.input_x1 = graph.get_operation_by_name('input_x1').outputs[0]
            self.input_x2 = graph.get_operation_by_name('input_x2').outputs[0]

            self.dropout_keep_prob = graph.get_operation_by_name(
                'dropout_keep_prob').outputs[0]
            self.prediction = graph.get_operation_by_name(
                'output/distance').outputs[0]
            self.sim = graph.get_operation_by_name(
                'accuracy/temp_sim').outputs[0]

        # Logreg (Level 1/Meta Model)
        self.metaModel = joblib.load(os.path.join(model_dir, 'meta.pkl'))

        # seen names (mapping dict from raw name to processed name)
        self.seen_names = {}

        # seen pairs (mapping dict from name pair tuple to similarity)
        self.seen_pairs = {}
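With the graph restored, scoring a pair would typically be a single TF1-style feed_dict run over the placeholders retrieved above. A hedged sketch, not from the original module: `matcher` stands for an instance of the class whose `__init__` is shown in Example #2, and `vocab.transform` assumes that `preprocess.VocabularyProcessor` mirrors the old tf.contrib interface it appears modeled on.

# Hypothetical inference sketch for the restored siamese graph.
# `matcher` is assumed to be an instance of the class defined above.
import numpy as np

x1 = np.asarray(list(matcher.vocab.transform(['nigel'])))   # assumed: VocabularyProcessor.transform
x2 = np.asarray(list(matcher.vocab.transform(['niall'])))
distance = matcher.sess.run(
    matcher.prediction,
    feed_dict={
        matcher.input_x1: x1,
        matcher.input_x2: x2,
        matcher.dropout_keep_prob: 1.0,   # disable dropout at inference time
    },
)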
Example #3
import unidecode
from abydos.distance import (
    IterativeSubString, BISIM, DiscountedLevenshtein, Prefix, LCSstr,
    MLIPNS, Strcmp95, MRA, Editex, SAPS, FlexMetric, JaroWinkler,
    HigueraMico, Sift4, Eudex, ALINE, PhoneticEditDistance,
)
from abydos.phonetic import PSHPSoundexFirst, Ainsworth
from abydos.phones import *
import re
from sklearn.preprocessing import MinMaxScaler

# Featurizer
pshp_soundex_first = PSHPSoundexFirst()
pe = Ainsworth()
iss = IterativeSubString()
bisim = BISIM()
dlev = DiscountedLevenshtein()
prefix = Prefix()
lcs = LCSstr()
mlipns = MLIPNS()
strcmp95 = Strcmp95()
mra = MRA()
editex = Editex()
saps = SAPS()
flexmetric = FlexMetric()
jaro = JaroWinkler(mode='Jaro')
higuera_mico = HigueraMico()
sift4 = Sift4()
eudex = Eudex()
aline = ALINE()
phonetic_edit = PhoneticEditDistance()
algos = [
    iss, bisim, dlev, prefix, lcs, mlipns, strcmp95, mra, editex, saps,
    flexmetric, jaro, higuera_mico, sift4, eudex, aline, phonetic_edit,
]
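A hedged sketch of how the featurizer objects above could be combined: abydos distance classes share a common `sim()` interface, so one feature value per algorithm can be collected by looping over `algos`. The `build_features` helper and the example names are illustrative, not from the original module.

# Illustrative helper (not from the original module): build one feature
# vector per name pair from the string-distance objects defined above.
def build_features(name1, name2):
    # each abydos distance object exposes sim() in [0, 1]
    return [algo.sim(name1, name2) for algo in algos]

features = build_features('nigel', 'niall')
print(len(features))   # 17 features, one per algorithm in `algos`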
Example #4
import unittest

from abydos.distance import Prefix, dist_prefix, sim_prefix


class PrefixTestCases(unittest.TestCase):
    """Test prefix similarity functions.

    abydos.distance.Prefix
    """

    cmp = Prefix()

    def test_prefix_sim(self):
        """Test abydos.distance.Prefix.sim."""
        self.assertEqual(self.cmp.sim('', ''), 1)
        self.assertEqual(self.cmp.sim('a', ''), 0)
        self.assertEqual(self.cmp.sim('', 'a'), 0)
        self.assertEqual(self.cmp.sim('a', 'a'), 1)
        self.assertEqual(self.cmp.sim('ax', 'a'), 1)
        self.assertEqual(self.cmp.sim('axx', 'a'), 1)
        self.assertEqual(self.cmp.sim('ax', 'ay'), 1 / 2)
        self.assertEqual(self.cmp.sim('a', 'ay'), 1)
        self.assertEqual(self.cmp.sim('a', 'ayy'), 1)
        self.assertEqual(self.cmp.sim('ax', 'ay'), 1 / 2)
        self.assertEqual(self.cmp.sim('a', 'y'), 0)
        self.assertEqual(self.cmp.sim('y', 'a'), 0)
        self.assertEqual(self.cmp.sim('aaax', 'aaa'), 1)
        self.assertAlmostEqual(self.cmp.sim('axxx', 'aaa'), 1 / 3)
        self.assertEqual(self.cmp.sim('aaxx', 'aayy'), 1 / 2)
        self.assertEqual(self.cmp.sim('xxaa', 'yyaa'), 0)
        self.assertAlmostEqual(self.cmp.sim('aaxxx', 'aay'), 2 / 3)
        self.assertEqual(self.cmp.sim('aaxxxx', 'aayyy'), 2 / 5)
        self.assertEqual(self.cmp.sim('xa', 'a'), 0)
        self.assertEqual(self.cmp.sim('xxa', 'a'), 0)
        self.assertEqual(self.cmp.sim('xa', 'ya'), 0)
        self.assertEqual(self.cmp.sim('a', 'ya'), 0)
        self.assertEqual(self.cmp.sim('a', 'yya'), 0)
        self.assertEqual(self.cmp.sim('xa', 'ya'), 0)
        self.assertEqual(self.cmp.sim('xaaa', 'aaa'), 0)
        self.assertEqual(self.cmp.sim('xxxa', 'aaa'), 0)
        self.assertEqual(self.cmp.sim('xxxaa', 'yaa'), 0)
        self.assertEqual(self.cmp.sim('xxxxaa', 'yyyaa'), 0)

        # Test wrapper
        self.assertEqual(sim_prefix('aaxxxx', 'aayyy'), 2 / 5)

    def test_prefix_dist(self):
        """Test abydos.distance.Prefix.dist."""
        self.assertEqual(self.cmp.dist('', ''), 0)
        self.assertEqual(self.cmp.dist('a', ''), 1)
        self.assertEqual(self.cmp.dist('', 'a'), 1)
        self.assertEqual(self.cmp.dist('a', 'a'), 0)
        self.assertEqual(self.cmp.dist('ax', 'a'), 0)
        self.assertEqual(self.cmp.dist('axx', 'a'), 0)
        self.assertEqual(self.cmp.dist('ax', 'ay'), 1 / 2)
        self.assertEqual(self.cmp.dist('a', 'ay'), 0)
        self.assertEqual(self.cmp.dist('a', 'ayy'), 0)
        self.assertEqual(self.cmp.dist('ax', 'ay'), 1 / 2)
        self.assertEqual(self.cmp.dist('a', 'y'), 1)
        self.assertEqual(self.cmp.dist('y', 'a'), 1)
        self.assertEqual(self.cmp.dist('aaax', 'aaa'), 0)
        self.assertAlmostEqual(self.cmp.dist('axxx', 'aaa'), 2 / 3)
        self.assertEqual(self.cmp.dist('aaxx', 'aayy'), 1 / 2)
        self.assertEqual(self.cmp.dist('xxaa', 'yyaa'), 1)
        self.assertAlmostEqual(self.cmp.dist('aaxxx', 'aay'), 1 / 3)
        self.assertEqual(self.cmp.dist('aaxxxx', 'aayyy'), 3 / 5)
        self.assertEqual(self.cmp.dist('xa', 'a'), 1)
        self.assertEqual(self.cmp.dist('xxa', 'a'), 1)
        self.assertEqual(self.cmp.dist('xa', 'ya'), 1)
        self.assertEqual(self.cmp.dist('a', 'ya'), 1)
        self.assertEqual(self.cmp.dist('a', 'yya'), 1)
        self.assertEqual(self.cmp.dist('xa', 'ya'), 1)
        self.assertEqual(self.cmp.dist('xaaa', 'aaa'), 1)
        self.assertEqual(self.cmp.dist('xxxa', 'aaa'), 1)
        self.assertEqual(self.cmp.dist('xxxaa', 'yaa'), 1)
        self.assertEqual(self.cmp.dist('xxxxaa', 'yyyaa'), 1)

        # Test wrapper
        self.assertEqual(dist_prefix('aaxxxx', 'aayyy'), 3 / 5)
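For reference, the Prefix similarity exercised above is the length of the common prefix divided by the length of the shorter string, and `dist` is its complement; the values in the comments below match the assertions in Example #4.

# Hedged sketch: direct use of abydos.distance.Prefix outside unittest.
from abydos.distance import Prefix

pfx = Prefix()
print(pfx.sim('aaxxxx', 'aayyy'))    # 0.4  (common prefix 'aa' / shorter length 5)
print(pfx.dist('aaxxxx', 'aayyy'))   # 0.6  (dist = 1 - sim)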
Example #5
    def __init__(self, model='latin', prefilter=True, allow_alt_surname=True, allow_initials=True,
                 allow_missing_components=True):

        # user-provided parameters
        self.model = model
        self.allow_alt_surname = allow_alt_surname
        self.allow_initials = allow_initials
        self.allow_missing_components = allow_missing_components
        self.prefilter = prefilter
        if self.prefilter:
            self.refined_soundex = {
                'b': 1, 'p': 1,
                'f': 2, 'v': 2,
                'c': 3, 'k': 3, 's': 3,
                'g': 4, 'j': 4,
                'q': 5, 'x': 5, 'z': 5,
                'd': 6, 't': 6,
                'l': 7,
                'm': 8, 'n': 8,
                'r': 9
            }

        # verify user-supplied class arguments
        model_dir = self.validate_parameters()

        self.impH = input_helpers.InputHelper()
        # Phonetic Encoder
        self.pe = Ainsworth()
        # Soundex Firstname Algorithm
        self.pshp_soundex_first = PSHPSoundexFirst()
        # Soundex Lastname Algorithm
        self.pshp_soundex_last = PSHPSoundexLast()

        # String Distance algorithms
        self.algos = [IterativeSubString(), BISIM(), DiscountedLevenshtein(), Prefix(), LCSstr(), MLIPNS(),
                      Strcmp95(), MRA(), Editex(), SAPS(), FlexMetric(), JaroWinkler(mode='Jaro'), HigueraMico(),
                      Sift4(), Eudex(), ALINE(), CovingtonGuard(), PhoneticEditDistance()]
        self.algo_names = ['iterativesubstring', 'bisim', 'discountedlevenshtein', 'prefix', 'lcsstr', 'mlipns',
                           'strcmp95', 'mra', 'editex', 'saps', 'flexmetric', 'jaro', 'higueramico',
                           'sift4', 'eudex', 'aline', 'covington', 'phoneticeditdistance']

        # String Distance Pipeline (Level 0/Base Model)
        self.baseModel = joblib.load(os.path.join(model_dir, 'base.pkl'))

        # Character Embedding Network (Level 0/Base Model)
        self.vocab = preprocess.VocabularyProcessor(max_document_length=15, min_frequency=0).restore(
            os.path.join(model_dir, 'vocab'))

        siamese_model = os.path.join(model_dir, 'siamese')

        # start tensorflow session
        graph = tf.Graph()
        with graph.as_default() as graph:
            self.sess = tf.Session() if tf.__version__[0] == '1' else tf.compat.v1.Session()
            with self.sess.as_default():
                # Load the saved meta graph and restore variables
                if tf.__version__[0] == '1':
                    saver = tf.train.import_meta_graph('{}.meta'.format(siamese_model))
                    self.sess.run(tf.global_variables_initializer())
                else:
                    saver = tf.compat.v1.train.import_meta_graph('{}.meta'.format(siamese_model))
                    self.sess.run(tf.compat.v1.global_variables_initializer())
                saver.restore(self.sess, siamese_model)
            # Get the placeholders from the graph by name
            self.input_x1 = graph.get_operation_by_name('input_x1').outputs[0]
            self.input_x2 = graph.get_operation_by_name('input_x2').outputs[0]

            self.dropout_keep_prob = graph.get_operation_by_name('dropout_keep_prob').outputs[0]
            self.prediction = graph.get_operation_by_name('output/distance').outputs[0]
            self.sim = graph.get_operation_by_name('accuracy/temp_sim').outputs[0]

        # Logreg (Level 1/Meta Model)
        self.metaModel = joblib.load(os.path.join(model_dir, 'meta.pkl'))

        # seen names (mapping dict from raw name to processed name)
        self.seen_names = {}
        # seen pairs (mapping dict from name pair tuple to similarity)
        self.seen_pairs = {}
        # user scores (mapping dict from name pair tuple to similarity)
        self.user_scores = {}
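
    # Hypothetical caching helper (not part of the original class): shows
    # how the seen_pairs dict initialised above could memoise pairwise
    # scores. `self.compare` is an assumed scoring method, named only for
    # illustration.
    def cached_similarity(self, name1, name2):
        key = (name1, name2)
        if key in self.seen_pairs:
            return self.seen_pairs[key]
        score = self.compare(name1, name2)   # assumed scoring method
        self.seen_pairs[key] = score
        return score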