Example #1
def load_cached_gens_from_file(prec):
    current_dir = os.path.dirname(os.path.abspath(__file__))
    cached_dir = os.path.join(current_dir, "cached_data")
    prec39 = PrecisionDeg2(39)
    prec34_m17_51 = PrecisionDeg2([(34, -17, 51)])

    if Deg2global_gens_dict:
        a_ky = next(iter(Deg2global_gens_dict))
        if Deg2global_gens_dict[a_ky].prec >= prec:
            return None

    if prec <= prec39 or set(prec) <= set(prec39) | set(prec34_m17_51):
        if prec <= PrecisionDeg2(21):
            gens_dct = load(os.path.join(cached_dir, '_fc_dict21.sobj'))
            max_prec = PrecisionDeg2(21)
        elif prec <= prec39:
            gens_dct = load(os.path.join(cached_dir, '_fc_dict39.sobj'))
            max_prec = prec39
        else:
            gens_dct1 = load(os.path.join(cached_dir, '_fc_dict39.sobj'))
            gens_dct2 = load(os.path.join(cached_dir,
                                          '_fc_dict_tuples_34_-17_51.sobj'))
            for k in gens_dct1.keys():
                gens_dct1[k].update(gens_dct2[k])
            gens_dct = {k: {t: gens_dct1[k][t] for t in prec}
                        for k in gens_dct1.keys()}
            max_prec = prec
        es4 = ModFormQexpLevel1(4, gens_dct[4], max_prec)
        es6 = ModFormQexpLevel1(6, gens_dct[6], max_prec)
        x10 = ModFormQexpLevel1(10, gens_dct[10], max_prec)
        x12 = ModFormQexpLevel1(12, gens_dct[12], max_prec)
        x35 = ModFormQexpLevel1(35, gens_dct[35], max_prec)
        d = {"es4": es4, "es6": es6, "x10": x10, "x12": x12, "x35": x35}
        for k, v in d.items():
            Deg2global_gens_dict[k] = v
Example #2
def load_ambient_space(N, k, i):
    fname = filenames.ambient(N, k, i, makedir=False)
    if os.path.exists(fname):
        return dict_to_ambient(load(fname))
    fname = filenames.M(N, k, i, makedir=False)
    if os.path.exists(fname):
        return load(fname)
    raise ValueError("ambient space (%s,%s,%s) not yet computed" % (N, k, i))
Example #3
 def load_basis_from(self, filename):
     dicts = load(filename)
     prec = dicts[0]["prec"]
     if self.prec > PrecisionDeg2._from_dict_to_object(prec):
         msg = "self.prec must be at most {prec}".format(prec=prec)
         raise RuntimeError(msg)
     basis = [ModFormQexpLevel1._from_dict_to_object(dct)
              for dct in dicts]
     self.__basis_cached = True
     self.__cached_basis = basis
Example #4
def load_factor(N, k, i, d, M=None):
    import sage.modular.modsym.subspace
    if M is None:
        M = load_ambient_space(N, k, i)
    f = filenames.factor(N, k, i, d, makedir=False)
    if not os.path.exists(f):
        raise RuntimeError("no such factor (%s,%s,%s,%s)" % (N, k, i, d))
    B = load(filenames.factor_basis_matrix(N, k, i, d))
    Bd = load(filenames.factor_dual_basis_matrix(N, k, i, d))
    v = load(filenames.factor_dual_eigenvector(N, k, i, d))
    nz = load(filenames.factor_eigen_nonzero(N, k, i, d))
    B._cache['in_echelon_form'] = True
    Bd._cache['in_echelon_form'] = True
    # These two lines are scary, obviously, since they depend on
    # private attributes of modular symbols.
    A = sage.modular.modsym.subspace.ModularSymbolsSubspace(M, B.row_module(), Bd.row_module(), check=False)
    A._HeckeModule_free_module__dual_eigenvector = {('a',nz):(v,False)}
    A._is_simple = True
    A._HeckeModule_free_module__decomposition = {(None,True):Sequence([A], check=False)}
    return A
Example #5
def compute_simple_shimura_curves(path=''):
    l = load(path + '/lattices_aniso_1_notgamma0.sobj')
    simshim = []
    aniso_shim_syms = []
    for Q in l:
        M = FiniteQuadraticModule(Q.matrix())
        s = GenusSymbol(M.jordan_decomposition().genus_symbol())
        aniso_shim_syms.append(s)
    G = SimpleModulesGraph2n(1, None)
    G.compute_from_startpoints(aniso_shim_syms)
    simshim.extend(G._simple)
    return simshim
Example #6
def compute_simple_shimura_curves(path=''):
    l = load(path + '/lattices_aniso_1_notgamma0.sobj')
    simshim = []
    aniso_shim_syms=[]
    for Q in l:
        M = FiniteQuadraticModule(Q.matrix())
        s = GenusSymbol(M.jordan_decomposition().genus_symbol())
        aniso_shim_syms.append(s)
    G = SimpleModulesGraph2n(1,None)
    G.compute_from_startpoints(aniso_shim_syms)
    simshim.extend(G._simple)
    return simshim
Example #7
#!/usr/bin/env sage

import argparse
import csv
import os
import sys
from multiprocessing import Pool
from sage.all import load, RR
load(os.path.dirname(os.path.realpath(__file__))+'/load-deps.py')

parser = argparse.ArgumentParser(description='ricci curvature of tangles',
                                 prog='ricci-tangle.py')

parser.add_argument('idx_path',
                    type=str, help='Path to .idx file.')
parser.add_argument('--graph', dest='graph_path',
                    type=str, help='Path to graph in SAGE object format.')
parser.add_argument('--out', dest='out_path',
                    type=str, help='Output path.')
parser.add_argument('--walk', choices=['lurw', 'upmh'],
                    default='lurw', help='What random walk to use.')
args = parser.parse_args()
assert(os.path.exists(args.idx_path))
assert(os.path.exists(args.graph_path))
g = load(args.graph_path)


def process_line(line):
    row = list(line)
    t1_idx = int(row[0])
    t2_idx = int(row[1])
Example #8
from sage.all import EllipticCurve, load, save

from testcong import make_hash, test_cong, test_irred, report
import sys

try:
    hashtab11_50 = load('hashtab11_50')
except IOError:
    hashtab11_50 = make_hash(11,11,500000,50)
    hashtab11_50 = dict([(k,v) for k,v in hashtab11_50.items() if len(v)>1])
    save(hashtab11_50, 'hashtab11_50')
    len(hashtab11_50)

def find_bad_pairs(ht=hashtab11_50):
    bad_pairs = []
    for s in ht.values():
        if len(s)>1:
            E1 = EllipticCurve(s[0])
            for r in s[1:]:
                E2 = EllipticCurve(r)
                res, info = test_cong(11, E1, E2, mumax=10**7)
                if not res:
                    report(res, info, 11, s[0], r)
                    bad_pairs.append([s[0], r])
    return bad_pairs

# bad_pairs = find_bad_pairs(hashtab7_50)
# previous cell takes ages; this is the output

bad_pairs = []
isom_sets = [s for s in hashtab11_50.values() if len(s)>1]
Example #9
def load_min_resol_prim(i, parity):
    fname = join(DATA_DIR, "str%s_%s_cand.sobj" % (i, parity))
    return load(fname)
Example #10
# This file was *autogenerated* from the file TFHE.sage
from sage.all_cmdline import *  # import sage library

_sage_const_1024 = Integer(1024)
_sage_const_2 = Integer(2)
_sage_const_26 = Integer(26)
_sage_const_32 = Integer(32)
_sage_const_1 = Integer(1)
_sage_const_0p99 = RealNumber('0.99')
_sage_const_0p292 = RealNumber('0.292')
_sage_const_0p265 = RealNumber('0.265')
# To reproduce the estimate run this snippet on http://aleph.sagemath.org/
from sage.all import load, sqrt, RR, ZZ, pi, oo
load('https://bitbucket.org/malb/lwe-estimator/raw/HEAD/estimator.py')

n = _sage_const_1024  # ciphertext dimension (also, key entropy)
sd = _sage_const_2**(-_sage_const_26)  # noise standard deviation
alpha = sqrt(
    _sage_const_2 * pi) * sd  # estimator defines noise rate = sqrt(2pi).stdev
q = _sage_const_2**_sage_const_32  # for compatibility only
m = oo  # the attacker can use as many samples as he wishes
secret_distribution = (-_sage_const_1, _sage_const_1)
success_probability = _sage_const_0p99

# Chosen cost model
# BKZ cost models: CLASSICAL - 0.292*beta + 16.4 + log(8*d,2) - primal
# i.e. BKZ.sieve =  lambda beta, d, B: ZZ(2)**RR(0.292*beta + 16.4 + log(8*d,2))
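# A sketch (not in the original snippet) of that classical-primal cost model as an
# actual callable, in the lambda form quoted in the comment above; whether and how
# the estimator accepts it (e.g. as a reduction_cost_model argument) is an
# assumption about its API, and the name below is illustrative only.
classical_primal_sieve = lambda beta, d, B: ZZ(2)**RR(0.292 * beta + 16.4 + log(8 * d, 2))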
print("CLASSICAL PRIMAL")
print(
    primal_usvp(n,
Example #11
def load_wts_brs(i):
    brs = load(join(DATA_DIR, "str%s_brs.sobj" % i))
    wts = load(join(DATA_DIR, "str%s_weights.sobj" % i))
    return FormsData(wts, [to_pol_over_z(p) for p in brs])
Example #12
import os, sys
from sage.all import load
os.chdir("/home/edgarcosta/lmfdb/")
sys.path.append("/home/edgarcosta/lmfdb/")
import lmfdb
db = lmfdb.db_backend.db
DelayCommit = lmfdb.db_backend.DelayCommit
load("/home/edgarcosta/lmfdb-gce/transition_scripts/export_special.py")


def backup():
    import subprocess, datetime
    timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M")
    userdbdump = "/scratch/postgres-backup/userdb-backup-%s.tar" % timestamp
    knowlsdump = "/scratch/postgres-backup/knowls-backup-%s.tar" % timestamp
    a = subprocess.check_call([
        "sudo", "-u", "postgres", "pg_dump", "--clean", "--if-exists",
        "--schema=userdb", "--file", userdbdump, "--format", "tar", "lmfdb"
    ])
    b = subprocess.check_call([
        "sudo", "-u", "postgres", "pg_dump", "--clean", "--if-exists",
        "--schema=public", "-t", 'kwl_knowls', "-t", "kwl_deleted", "-t",
        "kwl_history", "--file", knowlsdump, "--format", "tar", "lmfdb"
    ],
                              stderr=subprocess.STDOUT)
    if a + b != 0:
        print("Failed to backup users and kwl_*")
        raise ValueError
    print("Succeeded in backing up knowls and userdb")
    return a + b
Example #13
def load_star_norms(i):
    return [to_pol_over_z(d) for d in load(join(DATA_DIR, "str%s_star_norms.sobj" % i))]
Example #14
def load_from_data_dir(fname, dirname):
    return load(opath.join(data_dir(dirname), fname))
Example #15
def load_M(N, k, i):
    return load(filenames.M(N, k, i, makedir=False))
Example #16
    def find_missing(self, Nrange, krange, irange, fields=None):
        """
        Return generator of dictionaries

             {'space': (N, k, i), 'missing': missing, ...},

        where missing is in the intersection of fields and the
        following strings (or all strings if fields is None):

                'M', 'decomp',
                'aplist-00100',  'aplist-00100-01000',  'aplist-01000-10000',
                'charpoly-00100','charpoly-00100-01000','charpoly-01000-10000',
                'zeros', 
                'leading', 
                'atkin_lehner'

        If the string is not 'M' or 'decomp' then the meaning is that
        the indicated data is not complete for *all* newforms in this
        space, i.e., at least one is missing.  If 'decomp' is given,
        it means the decomp isn't complete (it could be partial).
        
        Spaces with dimension 0 are totally ignored. 

        Note that not every missing item is listed, just the *next* one that
        needs to be computed, e.g., if (11,2,0,'M') is output, then
        nothing else for (11,2,0,*) will be output.
        """
        if fields is None:
            fields = set(['M', 'decomp',
                'aplist-00100',  'aplist-00100-01000',  'aplist-01000-10000',
                'charpoly-00100','charpoly-00100-01000','charpoly-01000-10000',
                'zeros', 
                'leading', 
                'atkin_lehner'])
        else:
            assert isinstance(fields, (list, tuple, str))
            fields = set(fields)

        space_params = set(os.listdir(self._data))
        for k in rangify(krange):
            for N in rangify(Nrange):
                for ch in rangify(irange):
                    
                    if isinstance(ch, str):
                        CHI = list(enumerate(characters(N)))
                        if ch == 'quadratic':
                            CHI = [(i,chi) for i,chi in CHI if chi.order()==2]
                        elif ch == 'all':
                            pass
                        else:
                            raise ValueError
                    else:
                        try:
                            CHI = [(ch, character(N, ch))]
                        except IndexError:
                            CHI = []
                            
                    for i, chi in CHI:
                        N,k,i = int(N), int(k), int(i)
                        obj = {'space':(N,k,i)}
                        dim = dim_new(chi, k)
                        if dim > 0:
                            fields0 = list(fields)
                            if 'M' in fields0:
                                fields0.remove('M')
                            if 'decomp' in fields0:
                                fields0.remove('decomp')
                            Nki = self.space_name(N,k,i)
                            d3 = os.path.join(self._data, Nki)
                            if Nki not in space_params or not os.path.exists(os.path.join(d3, 'M-meta.sobj')):
                                if 'M' in fields:
                                    obj2 = dict(obj)
                                    obj2['missing'] = 'M'
                                    yield obj2
                                break
                            newforms = []
                            for fname in os.listdir(d3):
                                if fname.isdigit():
                                    # directory containing data about a newform
                                    d2 = os.path.join(d3, fname)
                                    deg = os.path.join(d2, 'degree.txt')
                                    if os.path.exists(deg):
                                        degree = eval(open(deg).read())
                                    else:
                                        B_file = os.path.join(d2, 'B.sobj')
                                        if not os.path.exists(B_file):
                                            degree = None
                                        else:
                                            degree = load(B_file).nrows()
                                            open(os.path.join(d2, 'degree.txt'),'w').write(str(degree))
                                    f = {'fname':fname, 'degree':degree,
                                         'other':set([x.split('.')[0] for x in os.listdir(d2)])}
                                    newforms.append(f)
                            degs = [f['degree'] for f in newforms]
                            if None in degs:
                                sum_deg = None
                            else:
                                sum_deg = sum(degs)
                            if 'decomp' in fields and (sum_deg is None or sum_deg != dim):
                                obj2 = dict(obj)
                                obj2['missing'] = 'decomp'
                                obj2['newforms'] = newforms
                                if sum_deg is not None and sum_deg > dim:
                                    obj2['bug'] = 'sum of degrees (=%s) is too big (should be %s) -- internal consistency error!'%(sum_deg, dim)
                                yield obj2
                                break
                            missing = []
                            for other in fields0:
                                for j in range(len(newforms)):
                                    if other not in newforms[j]['other']:
                                        missing.append(other)
                                        break
                            if missing:
                                missing.sort()
                                obj2 = dict(obj)
                                obj2['missing'] = 'other'
                                obj2['other'] = missing
                                yield obj2
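# A minimal usage sketch (not from the source): `db` stands for an instance of the
# class that defines find_missing(), and the ranges below are illustrative only.
# Each yielded dict identifies the space as obj['space'] and the next missing piece
# of data as obj['missing'] (with extra keys such as 'other' or 'newforms' when relevant).
for obj in db.find_missing(Nrange=range(11, 100), krange=[2], irange='all',
                           fields=['M', 'decomp', 'atkin_lehner']):
    N, k, i = obj['space']
    print("(%s,%s,%s): missing %s" % (N, k, i, obj['missing']))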
Example #17
import os
from sage.all import load

dir = os.getcwd()
try:
    load(os.path.join(dir, "neuralcode.py"))
    load(os.path.join(dir, "iterative_canonical.spyx"))
    load(os.path.join(dir, "examples.py"))
    load(os.path.join(dir, "visualization.py"))
    print("\nAll files loaded.")
except ValueError:
    print(
        "Files not loaded! Ensure all files are uploaded to the root directory."
    )
    raise
Example #18
def load_min_resol_prim(i):
    fname = join(DATA_DIR, "str%s_cand.sobj" % i)
    return load(fname)
Example #19
def gram_matrix(N,
                weight,
                prec=501,
                tol=1E-40,
                sv_min=1E-1,
                sv_max=1E15,
                bl=None,
                set_dim=None,
                force_prec=False):
    r""" Computes a matrix of p_{r,D}(r',D')
    for a basis of P_{r,D}, i.e. dim linearly independent P's
    INPUT: N      = Integer
           weight = Real
    OPTIONAL: 
           tol    = error bound for the Poincaré series
           sv_min = minimal allowed singular value when determining whether a given set is linarly independent or not.
           sv_max = maximally allowed singular value
           bl     = list of pairs (D_i,r_i) from which  we compute a matrix of coeffficients p_{D_i,r_i}(D_j,r_j)
        """
    # If we have supplied a list of D's and r's we make a gram matrix relative to these
    # otherwise we find a basis, i.e. linearly independent forms with correct dimension
    # find the dimension
    wt = '%.4f' % weight
    if (N < 10):
        stN = "0" + str(N)
    else:
        stN = str(N)
    v = dict()
    filename_work = "__N" + stN + "-" + wt + "--finding basis.txt"
    fp = open(filename_work, "w")
    fp.write("starting to find basis")
    fp.close()
    if (silent > 0):
        print("Forcing precision:{0}".format(force_prec))
    set_verbose(0)
    if (bl != None):
        dim = len(bl)
        l = bl
    else:
        if (set_dim != None and set_dim > 0):
            dim = set_dim
        else:
            dim = dimension_jac_cusp_forms(int(weight + 0.5), N, -1)
        l = list_of_basis(N, weight, prec, tol, sv_min, sv_max, set_dim=dim)
    j = 0
    for [D, r] in l.values():
        for [Dp, rp] in l.values():
            # Recall that the gram matrix is symmetric. We need only compute the upper diagonal
            if (list(v.values()).count([Dp, rp, D, r]) == 0):
                v[j] = [D, r, Dp, rp]
                j = j + 1
    # now v is a list we can get into computing coefficients
    # first we print the "gram data" (list of indices) to the file
    s = str(N) + ": (AI[" + str(N) + "],["
    indices = dict()
    for j in range(len(l)):
        Delta = l[j][0]
        r = l[j][1]
        diff = (r * r - Delta) % (4 * N)
        if diff != 0:
            raise ValueError(
                "ERROR r^2={0} not congruent to Delta={1} mod {2}!".format(
                    r * r, Delta, 4 * N))
        s = s + "(" + str(Delta) + "," + str(r) + ")"
        indices[j] = [Delta, r]
        if j < len(l) - 1:
            s = s + ","
        else:
            s = s + "]),"
    s = s + "\n"
    if silent > 0:
        print(s + "\n")
    filename2 = "PS_Gramdata" + stN + "-" + wt + ".txt"
    fp = open(filename2, "w")
    fp.write(s)
    fp.close()
    try:
        os.remove(filename_work)
    except os.error:
        print("Could not remove file:{0}".format(filename_work))
        pass
    filename_work = "__N" + stN + "-" + wt + "--computing_gram_matrix.txt"
    fp = open(filename_work, "w")
    fp.write("")
    fp.close()
    #print "tol=",tol
    #set_verbose(2)
    #print "force_prec(gram_mat)=",force_prec
    res = ps_coefficients_holomorphic_vec(N,
                                          weight,
                                          v,
                                          tol,
                                          prec,
                                          force_prec=force_prec)
    set_verbose(0)

    res['indices'] = indices
    maxerr = 0.0
    for j in res['errs'].keys():
        tmperr = abs(res['errs'][j])
        #print "err(",j,")=",tmperr
        if (tmperr > maxerr):
            maxerr = tmperr
        # switch format for easier viewing
        res['errs'][j] = RR(tmperr)
    if silent > 0:
        print("maxerr={0}".format(RR(maxerr)))
    res['maxerr'] = maxerr
    wt_phalf = '%.4f' % (weight + 0.5)
    filename3 = "PS_Gramerr" + stN + "-" + wt + ".txt"
    fp = open(filename3, "w")
    s = "MAXERR[" + wt_phalf + "][" + stN + "]=" + str(RR(maxerr))
    fp.write(s)
    fp.close()
    if (res['ok']):
        Cps = res['data']
    else:
        print("Failed to compute Fourier coefficients!")
        return 0
    RF = RealField(prec)
    A = matrix(RF, dim)
    kappa = weight
    fourpi = RF(4.0) * pi.n(prec)
    one = RF(1.0)
    N4 = RF(4 * N)
    C = dict()
    if (silent > 1):
        print("v={0}".format(v))
        print("dim={0}".format(dim))
    lastix = 0
    # First set the upper right part of A
    for j in range(dim):
        ddim = dim - j
        if (silent > 1):
            print("j={0} ddim={1} lastix={2]".format(j, ddim, lastix))
        for k in range(0, ddim):
            # need to scale with |D|^(k+0.5)
            if (silent > 1):
                print("k={0}".format(k))
                print("lastix+k={0}".format(lastix + k))
            mm = RF(abs(v[lastix + k][0])) / N4
            tmp = RF(mm**(weight - one))
            if (silent > 1):
                print("ddim+k={0}".format(ddim + k))
            A[j, j + k] = Cps[lastix + k] * tmp
            C[v[lastix + k][0], v[lastix + k][1]] = Cps[lastix + k]
        lastix = lastix + k + 1
    # And add the lower triangular part to make the matrix symmetric
    for j in range(dim):
        for k in range(0, j):
            A[j, k] = A[k, j]
    # And print the gram matrix
    res['matrix'] = A
    dold = mpmath.mp.dps
    mpmath.mp.dps = int(prec / 3.3)
    AInt = mpmath.matrix(int(A.nrows()), int(A.ncols()))
    AMp = mpmath.matrix(int(A.nrows()), int(A.ncols()))
    for ir in range(A.nrows()):
        for ik in range(A.ncols()):
            AInt[ir, ik] = mpmath.mpi(A[ir, ik] - tol, A[ir, ik] + tol)
            AMp[ir, ik] = mpmath.mpf(A[ir, ik])
    d = mpmath.det(AMp)
    if (silent > 1):
        print("det(A-as-mpmath)={0}".format(d))
    di = mpmath.det(AInt)
    if (silent > 1):
        print("det(A-as-interval)={0}".format(di))
    res['det'] = (RF(di.a), RF(di.b))

    filename = "PS_Gram" + stN + "-" + wt + ".txt"
    if (silent > 1):
        print("printing to file: {0}".format(filename))
    print_matrix_to_file(A, filename, 'A[' + str(N) + ']')
    if (silent > 1):
        print("A-A.transpose()={0}".format(norm(A - A.transpose())))
    B = A**(-1)
    #[d,B]=mat_inverse(A)
    if (silent > 1):
        print("A={0}".format(A.n(100)))
        print("det(A)={0}".format(di))
        print("Done making inverse!")
    #res['det']=d
    res['inv'] = B
    mpmath.mp.dps = dold
    filename = "PS_Gram-inv" + stN + "-" + wt + ".txt"
    print_matrix_to_file(B, filename, ' AI[' + str(N) + ']')
    # first make the filename
    s = '%.1e' % tol
    filename3 = "PS_Coeffs" + stN + "-" + wt + "-" + s + ".sobj"
    # If the file already exist we load it and append the new data
    if (silent > 0):
        print("saving data to: {0}".format(filename3))
    try:
        f = open(filename3, "r")
    except IOError:
        if (silent > 0):
            print("no file before!")
        # do nothing
    else:
        if silent > 0:
            print("file: {0} exists!".format(filename3))
        f.close()
        Cold = load(filename3)
        for key in Cold.keys():
            #                print"key:",key
            if key not in C:  # then we add it
                print("key:", key, " does not exist in the new version!")
                C[key] = Cold[key]
                save(C, filename3)
    ## Save the whole thing
    filename = "PS_all_gram" + stN + "-" + wt + ".sobj"
    save(res, filename)
    ## our work is completed and we can remove the file
    try:
        os.remove(filename_work)
    except os.error:
        print("Could not remove file: {0}".format(filename_work))
        pass
    return res
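# A minimal usage sketch (not from the source): the level/weight values below are
# illustrative only, and the helpers gram_matrix relies on (list_of_basis,
# ps_coefficients_holomorphic_vec, print_matrix_to_file, the global `silent`, ...)
# are assumed to be available in the calling Sage session.
res = gram_matrix(11, 2.5, prec=201, tol=1E-20)
if res != 0:
    A = res['matrix']      # Gram matrix of Poincare series coefficients
    AI = res['inv']        # its numerical inverse
    print("largest coefficient error: {0}".format(res['maxerr']))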
Example #20
from sage.all import EllipticCurve, load, save

from testcong import make_hash, test_cong, test_irred, report
from XE7 import test_isom
import sys

try:
    hashtab7_50 = load('hashtab7_50')
except IOError:
    hashtab7_50 = make_hash(7, 11, 500000, 50)
    hashtab7_50 = dict([(k, v) for k, v in hashtab7_50.items() if len(v) > 1])
    save(hashtab7_50, 'hashtab7_50')
    len(hashtab7_50)


def find_bad_pairs(ht=hashtab7_50):
    bad_pairs = []
    for s in ht.values():
        if len(s) > 1:
            E1 = EllipticCurve(s[0])
            for r in s[1:]:
                E2 = EllipticCurve(r)
                res, info = test_cong(7, E1, E2, mumax=10**7)
                if not res:
                    report(res, info, 7, s[0], r)
                    bad_pairs.append([s[0], r])
    return bad_pairs


# bad_pairs = find_bad_pairs(hashtab7_50)
# previous cell takes ages; this is the output
Example #21
"""
 *  Initialization of bound functionality
 *
 *  Copyright (C) 2016-2017
 *            Edgar Costa      ([email protected])
 *            Davide Lombardo  ([email protected])
 *            Jeroen Sijsling  ([email protected])
 *
 *  See LICENSE.txt for license details.
"""

from sage.all import load

import os
__boundsdir__ = os.getcwd() + '/bounds/'

from sage.all import *
magma.AttachSpec(__boundsdir__ + "spec")
load(__boundsdir__ + "constants.sage");
load(__boundsdir__ + "DiscriminantBound.sage")
load(__boundsdir__ + "TwistPolynomials.sage")
load(__boundsdir__ + "NonQM.sage");
load(__boundsdir__ + "GeometricallyIrreducible.sage");
load(__boundsdir__ + "EndomorphismRankBound.sage")
load(__boundsdir__ + "NonIsogenous.sage")
load(__boundsdir__ + "Genus2Factors.sage")
load(__boundsdir__ + "PointCounting.sage")
load(__boundsdir__ + "NonSquareCM.sage")
load(__boundsdir__ + "ProductsEC.sage")
Example #22
def load_wts_brs(i, parity):
    brs = load(join(DATA_DIR, "str%s_%s_brs.sobj" % (i, parity)))
    wts = load(join(DATA_DIR, "str%s_%s_weights.sobj" % (i, parity)))
    return FormsData(wts, [to_pol_over_q(p) for p in brs])
Example #23
"""
 *  Initialization
 *
 *  Copyright (C) 2016-2017
 *            Edgar Costa      ([email protected])
 *            Davide Lombardo  ([email protected])
 *            Jeroen Sijsling  ([email protected])
 *
 *  See LICENSE.txt for license details.
"""

from sage.all import load

import os
__endodir__ = os.getcwd() + '/heuristic_endomorphisms/'

from sage.all import *
load(__endodir__ + "Initialize.sage")
Example #24
 def load_from(cls, filename):
     data_dict = load(filename)
     return cls._from_dict_to_object(data_dict)
Example #25
def load_tangles(fname):
    return list(reanimate_tangle(*x) for x in sg.load(fname))
Example #26
def _load(filename):
    # TODO: need to support load from http:// url
    base, ext = os.path.splitext(filename)
    if ext == '.py':
        module_name = base.replace('/','.')
        if module_name in sys.modules.keys():
            module = reload(sys.modules[module_name])
        else:
            module = __import__(module_name)
        return module
    
    elif ext in ['.pyx', '.spyx']:
        module_name = base.replace('/','.')
        import sage.misc.cython
        # todo -- make this work if file isn't in current dir

        # We have to use a tempfile since the cython command modifies the input file
        d = tempfile.mkdtemp()
        target = os.path.join(d, filename)
        
        if ext == '.pyx':
            # do not preparse
            shutil.copyfile(filename, target)
        else:
            # preparse
            import sage.misc.preparser            
            content = '#autogenerated\nfrom sage.all import *\n'
            content += sage.misc.preparser.preparse_file(open(filename).read())
            open(target,'w').write(content)
        
        do_reload = module_name in sys.modules.keys()
        tmp_name, tmp_build_dir = sage.misc.cython.cython(target, create_local_so_file = not do_reload)
        os.unlink(target)
        os.rmdir(d)
        if do_reload:
            sys.path.append(tmp_build_dir)
            module = __import__(tmp_name)
        else:
            module = __import__(module_name)
        return module
    
    elif ext == '.sage':
        import sage.misc.preparser
        content = '#autogenerated\nfrom sage.all import *\n'
        content += sage.misc.preparser.preparse_file(open(filename).read())
        pyfile = base + '.py'
        if os.path.exists(pyfile) and open(pyfile).readline() != '#autogenerated\n':
            raise RuntimeError('refusing to overwrite non-autogenerated file "%s"' % pyfile)
        sys.path.append(os.path.split(base)[0])
        open(pyfile,'w').write(content)
        module_name = base.replace('/','.')
        if module_name in sys.modules.keys():
            module = reload(sys.modules[module_name])
        else:
            module = __import__(module_name)
        sage_all = [(s, id(s)) for s in sage.all.__dict__]
        for s in list(module.__dict__.keys()):
            if not s.startswith('_') and (s, id(s)) in sage_all:
                del module.__dict__[s]
        #new_symbols = [s for s in module.__dict__ if (s, id(s)) not in sage_all]
        #return ModuleWrapper(new_symbols, module, filename)
        return module
    else:
        # fallback to sage's load
        from sage.all import load
        return load(filename)
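# A minimal usage sketch (hypothetical file names): _load dispatches on the file
# extension -- .py files are imported (or reloaded) as modules, .pyx/.spyx files are
# compiled with Cython (after preparsing for .spyx), .sage files are preparsed into
# an autogenerated .py module, and anything else falls back to sage.all.load.
helpers = _load('scripts/helpers.py')        # imported as a Python module
kernel = _load('kernels/inner_loop.spyx')    # preparsed, then cythonized
script = _load('scripts/analysis.sage')      # preparsed into scripts/analysis.py
data = _load('results/table.sobj')           # handled by sage.all.load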
Example #27
#!/usr/bin/env sage

import argparse
import gzip
import os
from sage.all import load, gap
wd = os.path.dirname(os.path.realpath(__file__))
load(wd+'/load-deps.py')
load(wd+'/tangle-counter.py')

parser = argparse.ArgumentParser(description='Find commute times per tangle',
                                 prog='tangle-time-count.py')

parser.add_argument('tracefiles', nargs='+')
parser.add_argument(
    '-n',
    help='Number of leaves',
    required=True)
parser.add_argument('--oaccess', help='Access out path', required=True)
parser.add_argument('--otangle', help='Tangle out path', required=True)
parser.add_argument('--asymmetric', action='store_true',
                    help='Generate tangles without exchange symmetry.')

args = parser.parse_args()

tc = TangleCounter(int(args.n), not args.asymmetric)

with gzip.GzipFile(args.oaccess, mode='wb', mtime=0.) as walk_out, \
    open(args.otangle, mode='w') as tangle_out:
    for trace in args.tracefiles:
        print(trace)