Example #1
    def __init__(self,
                 surface_name,
                 generators,
                 automorph,
                 MCG_must_contain,
                 word_filter=basic_filter,
                 manifold_filter=basic_filter,
                 options=None):
        self.surface_name = surface_name
        self.generators = generators
        self.automorph = automorph
        self.MCG_must_contain = MCG_must_contain
        self.word_filter = word_filter
        self.manifold_filter = manifold_filter
        self.options = options if options is not None else Options()

        self.surfaces = SimpleNamespace(
            twister=snappy.twister.Surface(self.surface_name),
            flipper=flipper.load(self.surface_name),
            curver=curver.load(self.surface_name),
        )
        self.word_generator = WordGenerator(self.generators, self.automorph,
                                            self.MCG_must_contain,
                                            self.word_filter, self.surfaces,
                                            self.options)

        for path in [
                self.options.word_parts, self.options.properties_parts,
                self.options.word, self.options.properties, self.options.census
        ]:
            os.makedirs(os.path.dirname(path), exist_ok=True)
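This snippet is only the constructor of its (unnamed) class; Options, WordGenerator and basic_filter come from the surrounding project and are not shown. Purely as a sketch of how the arguments fit together, here is a hypothetical instantiation (the class name CensusGenerator and every argument value below are invented for illustration):

# Hypothetical sketch only: the class name and all argument values are
# illustrative, not taken from the project this constructor belongs to.
G = CensusGenerator(
    surface_name='S_1_2',   # any name that twister, flipper and curver can all load
    generators='aAbBcC',    # letters the word generator may use
    automorph=[],           # symmetries used to discard equivalent words
    MCG_must_contain=[],    # constraints every generated word must satisfy
)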
Example #2
 def test_isometries(self):
     tests = [('S_0_4', 2), ('S_1_1', 6), ('S_1_2', 4), ('S_2_1', 2),
              ('S_3_1', 2), ('E_12', 12), ('E_24', 24), ('E_36', 36)]
     for surface, num_isoms in tests:
         self.assertEqual(
             len(flipper.load(surface).triangulation.self_isometries()),
             num_isoms)
Example #3
 def test_sig(self):
     for surface in [
             'S_0_4', 'S_1_1', 'S_1_2', 'S_2_1', 'S_3_1', 'E_12', 'E_24',
             'E_36'
     ]:
         T = flipper.load(surface).triangulation
         T2 = flipper.triangulation_from_iso_sig(T.iso_sig())
         self.assertTrue(T.is_isometric_to(T2))
         self.assertEqual(T.iso_sig(), T2.iso_sig())
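The same signature round trip can be run outside the unittest harness; a minimal standalone sketch, using only the calls that appear in the test above:

import flipper

# Encode a triangulation as an isomorphism signature, decode it again,
# and check that the round trip is faithful.
T = flipper.load('S_1_2').triangulation
sig = T.iso_sig()
T2 = flipper.triangulation_from_iso_sig(sig)
assert T.is_isometric_to(T2) and T2.iso_sig() == sig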
Example #4
 def test_invariant_lamination(self):
     # Add more tests here.
     tests = [
         ('S_1_1', 'a'),
         ('S_1_2', 'a'),
         ('S_1_2', 'b'),
         ('S_1_2', 'c'),
         ('S_1_2', 'aB'),
         ('S_1_2', 'bbaCBAaBabcABB'),
         ('S_1_2', 'aCBACBacbaccbAaAAaBBcCcBBcCaBaaaABBabBcaBbCBCbaaa'),
         ('S_2_1', 'aaabcd'),
         # ('E_12', 'aaaaBBc'),  # Really slow.
         # ('E_12', 'aaBaaBBc'),  # Really slow.
         # ('E_12', 'aaaaBBaBaBc')  # Really slow useful for profiling. Current best time 102s.
     ]
     try:
         for surface, word in tests:
             S = flipper.load(surface)
             mapping_class = S.mapping_class(word)
             mapping_class.invariant_lamination()
     except flipper.AssumptionError:
         pass  # mapping_class is not pseudo-Anosov.
Example #5
 def test_random_word(self):
     S = flipper.load('S_1_2')
     all_words = set(S.all_words(5, equivalence='none'))
     for _ in range(10):
         word = S.random_word(5)
         self.assertIn(word, all_words)
Example #6
 def test_composition(self):
     S = flipper.load('S_1_2')
     self.assertEqual(S.mapping_class('abababababab'),
                      S.mapping_class('xx'))
Example #7
from time import time
import snappy
import flipper

for _, row in flipper.census('CHW').iterrows():
    start_time = time()
    M = snappy.Manifold(row.manifold)
    N = snappy.Manifold(
        flipper.load(row.surface).mapping_class(row.monodromy).bundle())
    assert M.is_isometric_to(N)  # Never fails for these examples.
    print('Matched %s over %s with %s in %0.3fs.' %
          (row.monodromy, row.surface, row.manifold, time() - start_time))
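Before looping over the census it can help to inspect the table itself. This sketch assumes, as the .iterrows() call above already suggests, that flipper.census returns a pandas DataFrame with 'surface', 'monodromy' and 'manifold' columns:

import flipper

census = flipper.census('CHW')
print(len(census), 'rows')
print(census.columns.tolist())  # expected to include 'surface', 'monodromy', 'manifold'
print(census.head())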
Example #8
import flipper

S = flipper.load('S_1_2')
word = 'aCBACBacbaccbAaAcAaBBcCcBBcCaBaaaABBabBcaBbCBCbaaa'

h = S.mapping_class(word)
print('Built the mapping class h := \'%s\'.' % word)

print('h has order %s (where 0 == infinite).' % h.order())
print('h is %s.' % h.nielsen_thurston_type())

try:
    print('h leaves L := %s projectively invariant.' %
          h.invariant_lamination().projective_string())
    print('and dilates it by a factor of %s.' % h.dilatation())
except flipper.AssumptionError:
    print(
        'We cannot find a projectively invariant lamination for h as it is not pseudo-Anosov.'
    )
Example #9
import flipper

length = 10
num_samples = 100

S = flipper.load('S_2_1')
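# Note: S.mapping_class() is given an integer below, which flipper interprets as
# a request for a random mapping class of that length, so each of the num_samples
# draws is a fresh random word of length i (i runs from 0 to length - 1).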
for i in range(length):
    pA_samples = sum(1 if S.mapping_class(i).is_pseudo_anosov() else 0
                     for _ in range(num_samples))
    print('Length %d: %0.1f%% pA' % (i, float(pA_samples) * 100 / num_samples))
Example #10
File: twister.py  Project: saraedum/flipper
def match(surface, monodromy):
    M = snappy.twister.Surface(surface).bundle(monodromy)
    N = snappy.Manifold(
        flipper.load(surface).mapping_class(monodromy).bundle())
    return M.is_isometric_to(N)
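A hypothetical call, just to show how match() is meant to be used; it needs snappy and flipper imported, and the surface/monodromy pair below is simply borrowed from the veering-bundle example later on:

import snappy
import flipper

# Illustrative values only (the pair is taken from the bundle example below).
print(match('S_1_2', 'abC'))  # expected to print True when the two bundles agree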
Example #11
import snappy
import flipper

# A pseudo-Anosov mapping class.
h = flipper.load('S_1_2').mapping_class('abC')

# Build Agol's veering triangulation of the bundle.
# This will fail with an AssumptionError if h is not pseudo-Anosov.
bundle = h.bundle()

print('It has %d cusp(s) with the following properties:' %
      bundle.triangulation3.num_cusps)
for index, (real, fibre, degeneracy) in enumerate(
        zip(bundle.cusp_types(), bundle.fibre_slopes(),
            bundle.degeneracy_slopes())):
    print('\tCusp %s (%s): Fibre slope %s, degeneracy slope %s' %
          (index, 'Real' if real else 'Fake', fibre, degeneracy))

# Fake cusps filled.
M = snappy.Manifold(bundle)
print(M.identify())

# Can also build a non-veering triangulation of the bundle.
# This works for all mapping classes.
M2 = snappy.Manifold(h.bundle(veering=False))
print(M2.identify())

# If we don't fill the fake cusps we may get a different manifold.
N = snappy.Manifold(bundle.snappy_string(filled=False))
print(N.identify())
Example #12
from time import time
import flipper

length = 7
S = flipper.load('S_1_2')  # Get an EquippedTriangulation.

start_time = time()
all_words = list(S.all_words(length))
print('Built %d words in %0.3fs.' % (len(all_words), time() - start_time))

# In parallel:
start_time = time()
all_words2 = list(S.all_words(length, cores=2))
print('Built %d words in %0.3fs.' % (len(all_words2), time() - start_time))

assert len(all_words) == len(
    set(all_words)) and set(all_words) == set(all_words2)
Example #13
## This is a Python script that will be called from GH to get curve data using Connecting_Points.py.
import Connecting_Points_0409 as cp
import Reordering_points as rp
import Graph_order as go
from sys import argv
import flipper
script, init_curve, mapping_class, iterations = argv
#mapping_class = 'aBeFcdAE'
#init_curve = 'b'
#iterations = 2
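# Hypothetical invocation, using the commented-out defaults above as arguments:
#   python <this_script>.py b aBeFcdAE 2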
triangulation = [(1, 8, 0), (7, ~3, ~8), (2, 6, ~7), (5, ~2, ~6), (3, 4, ~5),
                 (~0, ~1, ~4)]
edges = [1, 6, 7, 8]
S = flipper.load('S_2_1')
h = S.mapping_class(mapping_class)
curve = S.lamination(init_curve)
for i in range(int(iterations)):
    curve = h(curve)
oct_out = cp.octagon_only(cp.connecting(triangulation, curve.geometric), edges)
reordered = rp.reorder_list(oct_out)
perm = []
for connection in oct_out:
    i = reordered.index(connection)
    perm.append(i)
D = {1: 3, -2: 7, 6: 1, -7: 5, 7: 2, -8: 6, 8: 0, -9: 4}
new_out = []
for tup in reordered:
    new_tup = ((D[tup[0][0]], tup[0][1]), (D[tup[1][0]], tup[1][1]))
    new_out.append(new_tup)
newer_out = []
Example #14
from time import time
import flipper

times = {}
surface = 'S_3_1'
length = 20
num_samples = 100

S = flipper.load(surface)
for index in range(num_samples):
    monodromy = S.random_word(length)
    h = S.mapping_class(monodromy)
    start_time = time()
    try:
        h.invariant_lamination()
        times[monodromy] = time() - start_time
        print('%3d/%d: %s %s, Time: %0.3f' %
              (index + 1, num_samples, surface, monodromy, times[monodromy]))
    except flipper.AssumptionError:
        times[monodromy] = time() - start_time
        print('%3d/%d: %s %s, not pA, Time: %0.3f' %
              (index + 1, num_samples, surface, monodromy, times[monodromy]))

print('Average time: %0.3f' % (sum(times.values()) / num_samples))
print(
    'Slowest: %s, Time: %0.3f' %
    (max(times, key=lambda w: times[w]).replace('.', ''), max(times.values())))
print('Total time: %0.3f' % sum(times.values()))