Example #1
def find_effect_of_end(end):
    """Compare the effect of adding `end` to the first vs. the second ends
    list, relative to a baseline with `end` in neither list."""
    first_ends_list_without = [e for e in first_ends_list]
    second_ends_list_without = [e for e in second_ends_list]
    if end in first_ends_list_without:
        first_ends_list_without.remove(end)
    if end in second_ends_list_without:
        second_ends_list_without.remove(end)
    first_ends_list_with = [e for e in first_ends_list_without]
    first_ends_list_with.append(end)
    second_ends_list_with = [e for e in second_ends_list_without]
    second_ends_list_with.append(end)
    neither_patterns = ortho_matching.make_patterns(starts_list, vowels_list,
                                                    first_ends_list_without,
                                                    second_ends_list_without)
    first_with_patterns = ortho_matching.make_patterns(
        starts_list, vowels_list, first_ends_list_with,
        second_ends_list_without)
    second_with_patterns = ortho_matching.make_patterns(
        starts_list, vowels_list, first_ends_list_without,
        second_ends_list_with)
    first_change_dist = get_change_distribution(patterns=first_with_patterns,
                                                base_patterns=neither_patterns)
    second_change_dist = get_change_distribution(
        patterns=second_with_patterns, base_patterns=neither_patterns)
    # Drop into the debugger to inspect first_change_dist and
    # second_change_dist interactively (the function does not return them).
    import pdb
    pdb.set_trace()
Example #2
def find_effect_of_end(end):
    first_ends_list_without = [e for e in first_ends_list]
    second_ends_list_without = [e for e in second_ends_list]
    if end in first_ends_list_without:
        first_ends_list_without.remove(end)
    if end in second_ends_list_without:
        second_ends_list_without.remove(end)
    first_ends_list_with = [e for e in first_ends_list_without]
    first_ends_list_with.append(end)
    second_ends_list_with = [e for e in second_ends_list_without]
    second_ends_list_with.append(end)
    neither_patterns = ortho_matching.make_patterns(
        starts_list, vowels_list, first_ends_list_without, second_ends_list_without
    )
    first_with_patterns = ortho_matching.make_patterns(
        starts_list, vowels_list, first_ends_list_with, second_ends_list_without
    )
    second_with_patterns = ortho_matching.make_patterns(
        starts_list, vowels_list, first_ends_list_without, second_ends_list_with
    )
    first_change_dist = get_change_distribution(patterns=first_with_patterns, base_patterns=neither_patterns)
    second_change_dist = get_change_distribution(patterns=second_with_patterns, base_patterns=neither_patterns)
    import pdb

    pdb.set_trace()
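For context, a minimal sketch of what a helper like get_change_distribution could compute is shown below; the dict-of-stroke-counts representation and the Counter-based tally are assumptions for illustration, not the module's actual implementation.

from collections import Counter

def get_change_distribution(patterns, base_patterns):
    # Hypothetical sketch: assume each pattern set maps a word to the number
    # of strokes needed to write it, and tally how each word's stroke count
    # shifts when moving from the baseline to the new pattern set.
    changes = Counter()
    for word, base_strokes in base_patterns.items():
        changes[patterns.get(word, base_strokes) - base_strokes] += 1
    return changes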
Example #3
def add_end_effects():
    """Rank candidate letter groups by the average stroke savings from adding
    each group to the first ends list."""
    groups = []
    base_patterns = ortho_matching.make_patterns(starts_list, vowels_list,
                                                 first_ends_list,
                                                 second_ends_list)
    base = get_avg_strokes(base_patterns)[0]

    g2 = [g for f, g in get_group_order(k=2, n=200)]
    g3 = [g for f, g in get_group_order(k=3, n=500)]
    g4 = [g for f, g in get_group_order(k=4, n=400)]
    #g5 = [g for f, g in get_group_order(k=5, n=300)]
    #g6 = [g for f, g in get_group_order(k=6, n=200)]
    #g7 = [g for f, g in get_group_order(k=7, n=100)]
    for algroup in alphabet + alphabet_pairs + g3 + g4:  # + g5:# + g6 + g7:
        #for algroup in alphabet:
        #for algroup in ('LY', ):
        updated_first_ends_list = [v for v in first_ends_list]
        updated_first_ends_list.append(algroup.upper())
        patterns = ortho_matching.make_patterns(starts_list, vowels_list,
                                                updated_first_ends_list,
                                                second_ends_list)
        strokes, failure = get_avg_strokes(patterns)
        groups.append((base - strokes, algroup))
        print(algroup, base - strokes)
    groups.sort()
    for x in groups:
        print(x)
    print(base)
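The rankings above depend on get_avg_strokes, which returns an (average strokes, failure count) pair; in some of the examples below it is also called with no argument, in which case it presumably builds the default pattern set itself. A minimal sketch under the assumption that a pattern set maps each word to a stroke count, or None when no pattern matches:

def get_avg_strokes(patterns):
    # Hypothetical sketch: average the stroke counts of the words that matched
    # a pattern and count the words that failed to match (stored as None).
    matched = [count for count in patterns.values() if count is not None]
    failures = len(patterns) - len(matched)
    average = sum(matched) / len(matched) if matched else float('inf')
    return average, failures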
Example #4
def add_start_effects():
    """Rank candidate letter groups by the average stroke savings from adding
    each group to the starts list."""
    groups = []
    patterns = ortho_matching.make_patterns(starts_list, vowels_list, first_ends_list, second_ends_list)
    base = get_avg_strokes(patterns)[0]

    g2 = [g for f, g in get_group_order(k=2, n=200)]
    g3 = [g for f, g in get_group_order(k=3, n=500)]
    g4 = [g for f, g in get_group_order(k=4, n=400)]
    for algroup in alphabet + alphabet_pairs + g3:  # alphabet + g2 + g3 + g4:
        updated_starts_list = [v for v in starts_list]
        updated_starts_list.append(algroup.upper())
        patterns = ortho_matching.make_patterns(updated_starts_list, vowels_list, first_ends_list, second_ends_list)
        strokes, failure = get_avg_strokes(patterns)
        groups.append((base - strokes, algroup))
        print(algroup, base - strokes)
    groups.sort()
    for x in groups:
        print(x)
    print(base)
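The candidate pools alphabet and alphabet_pairs are assumed to be plain sequences of letter groups; a purely illustrative reconstruction of how alphabet_pairs could be derived from alphabet (not taken from the original module):

import itertools

# Hypothetical reconstruction: single letters plus every ordered two-letter
# combination, matching the uppercase strings that algroup.upper() produces.
alphabet = [chr(c) for c in range(ord('A'), ord('Z') + 1)]
alphabet_pairs = [a + b for a, b in itertools.product(alphabet, repeat=2)]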
Example #5
def final_effects():
    """Estimate the stroke savings when the final pattern slot contains only
    each multi-letter final ('ING', 'ED', 'LY') in turn."""
    base = get_avg_strokes()[0]
    pairs = []
    for final in ("ING", "ED", "LY", "Y"):
        if len(final) > 1:
            finals_list = [final]
            patterns = ortho_matching.make_patterns(starts_list, vowels_list, ends_list, finals_list)
            strokes, failure = get_avg_strokes(patterns)
            pairs.append((base - strokes, final))
    pairs.sort()
    for x in pairs:
        print(x)
Example #6
def final_effects():
    base = get_avg_strokes()[0]
    pairs = []
    for final in ('ING', 'ED', 'LY', 'Y'):
        if len(final) > 1:
            finals_list = [final]
            patterns = ortho_matching.make_patterns(starts_list, vowels_list,
                                                    ends_list, finals_list)
            strokes, failure = get_avg_strokes(patterns)
            pairs.append((base - strokes, final))
    pairs.sort()
    for x in pairs:
        print(x)
Example #7
def add_start_effects():
    groups = []
    patterns = ortho_matching.make_patterns(starts_list, vowels_list,
                                            first_ends_list, second_ends_list)
    base = get_avg_strokes(patterns)[0]

    g2 = [g for f, g in get_group_order(k=2, n=200)]
    g3 = [g for f, g in get_group_order(k=3, n=500)]
    g4 = [g for f, g in get_group_order(k=4, n=400)]
    for algroup in alphabet + alphabet_pairs + g3:  #alphabet + g2 + g3 + g4:
        updated_starts_list = [v for v in starts_list]
        updated_starts_list.append(algroup.upper())
        patterns = ortho_matching.make_patterns(updated_starts_list,
                                                vowels_list, first_ends_list,
                                                second_ends_list)
        strokes, failure = get_avg_strokes(patterns)
        groups.append((base - strokes, algroup))
        print(algroup, base - strokes)
    groups.sort()
    for x in groups:
        print(x)
    print(base)
Example #8
def start_effects():
    """For each entry in starts_list, measure the effect on the improvement
    distribution of removing that entry; the largest effects are printed
    first."""
    data = []
    for start in starts_list:
        updated_starts_list = [v for v in starts_list]
        if start in updated_starts_list:
            updated_starts_list.remove(start)
        patterns = ortho_matching.make_patterns(updated_starts_list, vowels_list, first_ends_list, second_ends_list)
        effects, words = distribution_of_improvement(patterns, removed=True)
        data.append((effects[-1], start, effects, words))
    data.sort(reverse=True)
    for e, start, effects, words in data:
        print("*************************")
        print(start, effects[-1])
Example #9
def vowel_effects():
    """Estimate the extra strokes incurred when each multi-letter vowel
    pattern is removed from vowels_list."""
    base = get_avg_strokes()[0]
    pairs = []
    for vowel in vowels_list:
        if len(vowel) > 1:
            updated_vowels_list = [v for v in vowels_list]
            updated_vowels_list.remove(vowel)
            patterns = ortho_matching.make_patterns(starts_list, updated_vowels_list, first_ends_list, second_ends_list)
            strokes, failure = get_avg_strokes(patterns)
            pairs.append((strokes - base, vowel))
    pairs.sort()
    for x in pairs:
        print(x)
Example #10
def start_effects():
    data = []
    for start in starts_list:
        updated_starts_list = [v for v in starts_list]
        if start in updated_starts_list:
            updated_starts_list.remove(start)
        patterns = ortho_matching.make_patterns(updated_starts_list,
                                                vowels_list, first_ends_list,
                                                second_ends_list)
        effects, words = distribution_of_improvement(patterns, removed=True)
        data.append((effects[-1], start, effects, words))
    data.sort(reverse=True)
    for e, start, effects, words in data:
        print('*************************')
        print(start, effects[-1])
Example #11
def add_end_effects():
    groups = []
    base_patterns = ortho_matching.make_patterns(starts_list, vowels_list, first_ends_list, second_ends_list)
    base = get_avg_strokes(base_patterns)[0]

    g2 = [g for f, g in get_group_order(k=2, n=200)]
    g3 = [g for f, g in get_group_order(k=3, n=500)]
    g4 = [g for f, g in get_group_order(k=4, n=400)]
    # g5 = [g for f, g in get_group_order(k=5, n=300)]
    # g6 = [g for f, g in get_group_order(k=6, n=200)]
    # g7 = [g for f, g in get_group_order(k=7, n=100)]
    for algroup in alphabet + alphabet_pairs + g3 + g4:  # + g5:# + g6 + g7:
        # for algroup in alphabet:
        # for algroup in ('LY', ):
        updated_first_ends_list = [v for v in first_ends_list]
        updated_first_ends_list.append(algroup.upper())
        patterns = ortho_matching.make_patterns(starts_list, vowels_list, updated_first_ends_list, second_ends_list)
        strokes, failure = get_avg_strokes(patterns)
        groups.append((base - strokes, algroup))
        print(algroup, base - strokes)
    groups.sort()
    for x in groups:
        print(x)
    print(base)
Example #12
def vowel_effects():
    base = get_avg_strokes()[0]
    pairs = []
    for vowel in vowels_list:
        if len(vowel) > 1:
            updated_vowels_list = [v for v in vowels_list]
            updated_vowels_list.remove(vowel)
            patterns = ortho_matching.make_patterns(starts_list,
                                                    updated_vowels_list,
                                                    first_ends_list,
                                                    second_ends_list)
            strokes, failure = get_avg_strokes(patterns)
            pairs.append((strokes - base, vowel))
    pairs.sort()
    for x in pairs:
        print(x)
Example #13
def second_to_first_end_effects():
    """For each end currently handled in second_ends_list, measure the effect
    of moving it into first_ends_list instead."""
    data = []
    for end in second_ends_list:
        updated_first_ends_list = [v for v in first_ends_list]
        updated_second_ends_list = [v for v in second_ends_list]
        if end not in first_ends_list:  # add to the first slot unless already there
            updated_first_ends_list.append(end)
        if end in second_ends_list:
            updated_second_ends_list.remove(end)
        patterns = ortho_matching.make_patterns(starts_list, vowels_list,
                                                updated_first_ends_list,
                                                updated_second_ends_list)
        effects, words = distribution_of_improvement(patterns)
        data.append((effects[-1], end, effects, words))
    data.sort(reverse=True)
    for e, end, effects, words in data:
        print(end, effects[-1])
Example #14
def second_to_first_end_effects():
    data = []
    for end in second_ends_list:
        updated_first_ends_list = [v for v in first_ends_list]
        updated_second_ends_list = [v for v in second_ends_list]
        if end not in first_ends_list:  # add to the first slot unless already there
            updated_first_ends_list.append(end)
        if end in second_ends_list:
            updated_second_ends_list.remove(end)
        patterns = ortho_matching.make_patterns(
            starts_list, vowels_list, updated_first_ends_list, updated_second_ends_list
        )
        effects, words = distribution_of_improvement(patterns)
        data.append((effects[-1], end, effects, words))
    data.sort(reverse=True)
    for e, end, effects, words in data:
        print(end, effects[-1])
Example #15
    second_with_patterns = ortho_matching.make_patterns(
        starts_list, vowels_list, first_ends_list_without,
        second_ends_list_with)
    first_change_dist = get_change_distribution(patterns=first_with_patterns,
                                                base_patterns=neither_patterns)
    second_change_dist = get_change_distribution(
        patterns=second_with_patterns, base_patterns=neither_patterns)
    import pdb
    pdb.set_trace()


#end_effects()
#        print('null', start, vowel, end)
#second_to_first_end_effects()
#test_first()
#print(len(starts_list))
#print(len(ends_list))

# Quick module-level probe: baseline average strokes with one test end removed
# from both ends lists.  TESTEND is assumed to be defined earlier in the script.
updated_first_ends_list = [v for v in first_ends_list]
updated_second_ends_list = [v for v in second_ends_list]
end = TESTEND
if end in first_ends_list:
    updated_first_ends_list.remove(end)
if end in second_ends_list:
    updated_second_ends_list.remove(end)
patterns = ortho_matching.make_patterns(starts_list, vowels_list,
                                        updated_first_ends_list,
                                        updated_second_ends_list)
base = get_avg_strokes(patterns)[0]
print(base)
Example #16
    first_with_patterns = ortho_matching.make_patterns(
        starts_list, vowels_list, first_ends_list_with, second_ends_list_without
    )
    second_with_patterns = ortho_matching.make_patterns(
        starts_list, vowels_list, first_ends_list_without, second_ends_list_with
    )
    first_change_dist = get_change_distribution(patterns=first_with_patterns, base_patterns=neither_patterns)
    second_change_dist = get_change_distribution(patterns=second_with_patterns, base_patterns=neither_patterns)
    import pdb

    pdb.set_trace()


# end_effects()
#        print('null', start, vowel, end)
# second_to_first_end_effects()
# test_first()
# print(len(starts_list))
# print(len(ends_list))

updated_first_ends_list = [v for v in first_ends_list]
updated_second_ends_list = [v for v in second_ends_list]
end = TESTEND
if end in first_ends_list:
    updated_first_ends_list.remove(end)
if end in second_ends_list:
    updated_second_ends_list.remove(end)
patterns = ortho_matching.make_patterns(starts_list, vowels_list, updated_first_ends_list, updated_second_ends_list)
base = get_avg_strokes(patterns)[0]
print(base)
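As a usage note, the module-level probe above can be wrapped into a small helper so the end under test is passed in explicitly instead of being read from the TESTEND constant; this is only a refactoring sketch built on the same globals the examples already use, not part of the original module.

def baseline_without_end(end):
    # Rebuild the pattern set with `end` removed from both ends lists and
    # return the baseline average stroke count, mirroring the probe above.
    first_ends = [e for e in first_ends_list if e != end]
    second_ends = [e for e in second_ends_list if e != end]
    patterns = ortho_matching.make_patterns(starts_list, vowels_list,
                                            first_ends, second_ends)
    return get_avg_strokes(patterns)[0]

print(baseline_without_end(TESTEND))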